import pandas as pd
from echelon import EchelonBT
from test_echelon import TestApp
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import warnings

warnings.filterwarnings("ignore")

data_master = pd.read_pickle('master_pricing_df')
stock_list = ['AAPL', 'GOOGL', 'GE', 'LUV']
app_goog = data_master[stock_list]
weights_master = pd.DataFrame(index=app_goog.index)

num_periods = np.linspace(2, 33, 30)
rolling_windows = np.linspace(2, 33, 30)
returns_df = np.zeros((len(num_periods), len(rolling_windows)))


def backtest():
    app = TestApp()
    for i, num_period in enumerate(num_periods):
        # momentum-style weights: trailing return over `num_period` bars
        for stock in stock_list:
            weights_master[stock] = (
                data_master[stock].shift(1) - data_master[stock].shift(int(num_period))
            ) / data_master[stock].shift(int(num_period))
        for j, rolling_window in enumerate(rolling_windows):
            app.stock_list = stock_list
            app.master_dataframe = app_goog
            app.weights = weights_master
            app.place_trades = False
            app.long_only = True
            app.optimize = True
            app.stats = False
            app.num_periods = num_period
            app.rolling_window = rolling_window
            app.run()
            returns_df[i, j] = app.portfolio.iloc[-1]
    print(returns_df)


backtest()
import tools.hal2doc
import tools.search
'''
Description:
Given an n x n binary matrix grid, return the length of the shortest clear
path in the matrix. If there is no clear path, return -1.

A clear path in a binary matrix is a path from the top-left cell (i.e., (0, 0))
to the bottom-right cell (i.e., (n - 1, n - 1)) such that:
- All the visited cells of the path are 0.
- All the adjacent cells of the path are 8-directionally connected (i.e., they
  are different and they share an edge or a corner).

The length of a clear path is the number of visited cells of this path.

Example 1:
Input: grid = [[0,1],[1,0]]
Output: 2

Example 2:
Input: grid = [[0,0,0],[1,1,0],[1,1,0]]
Output: 4

Example 3:
Input: grid = [[1,0,0],[1,1,0],[1,1,0]]
Output: -1

Constraints:
n == grid.length
n == grid[i].length
1 <= n <= 100
grid[i][j] is 0 or 1
'''
from heapq import heappush, heappop
from itertools import product


class Solution:
    def shortestPathBinaryMatrix(self, grid):
        # n = size - 1
        n = len(grid) - 1
        if grid[0][0] or grid[n][n]:
            # quick rejection when the source or destination is an obstacle
            return -1

        # direction vectors of the 8-directionally connected neighbors
        directions = [x for x in product((-1, 0, 1), repeat=2) if x != (0, 0)]

        # traversal queue entries: (evaluation, current step, current i, current j)
        q = [(n + 1, 1, n, n)]
        grid[n][n] = -1  # the step count of the start cell is 1

        # search from the destination back to the source
        while q:
            # pop the cell with the minimal evaluation value
            _, step, i, j = heappop(q)
            if (i, j) == (0, 0):
                # arrived at the source; return the minimal step count
                return step
            # explore each possible next move
            for di, dj in directions:
                newI, newJ = i + di, j + dj
                # skip obstacles (grid value 1); otherwise search and update
                if 0 <= newI <= n and 0 <= newJ <= n and grid[newI][newJ] < 1:
                    newStep = step + 1
                    # on a first visit, or when fewer steps are possible, update
                    if grid[newI][newJ] == 0 or grid[newI][newJ] < -newStep:
                        # store the new minimal step count (negated marker)
                        grid[newI][newJ] = -newStep
                        # evaluation combines the Chebyshev distance to (0, 0)
                        # with the steps taken so far (A*-style priority)
                        evaluation = max(newI, newJ) + newStep
                        # add this move to the traversal queue
                        heappush(q, (evaluation, newStep, newI, newJ))
        return -1


# m: the height of grid
# n: the width of grid

## Time Complexity: O(m*n log(m*n))
#
# The overhead in time is the cost of the best-first traversal: each cell is
# pushed to and popped from the min-heap at most a constant number of times,
# and each heap operation costs O(log(m*n)).

## Space Complexity: O(m*n)
#
# The overhead in space is the cost of the min-heap traversal queue, O(m*n).

import unittest


class Testing(unittest.TestCase):
    def test_case_1(self):
        result = Solution().shortestPathBinaryMatrix(grid=[[0, 1], [1, 0]])
        self.assertEqual(result, 2)

    def test_case_2(self):
        result = Solution().shortestPathBinaryMatrix(grid=[[0, 0, 0], [1, 1, 0], [1, 1, 0]])
        self.assertEqual(result, 4)


if __name__ == '__main__':
    unittest.main()
# -*- coding: utf-8 -*-

__version__ = '0.3.0'

import threading
import os
import logging

# logging.basicConfig(level=logging.ERROR)
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)


class PinState(object):
    """An ultra simple pin-state object.

    Keeps track of the data related to each pin.

    Args:
        value: the file pointer to set/read the value of the pin.
        direction: the file pointer to set/read the direction of the pin.
    """
    def __init__(self, value, direction):
        self.value = value
        self.direction = direction


path = os.path
pjoin = os.path.join

gpio_root = '/tmp/sys/class/gpio'
gpiopath = lambda pin: os.path.join(gpio_root, 'gpio{0}'.format(pin))
_export_lock = threading.Lock()

_pyset = set

_open = dict()
FMODE = 'w'

IN, OUT = 'in', 'out'
LOW, HIGH = 'low', 'high'


def _write(f, v):
    log.debug("writing: {0}: {1}".format(f, v))
    f.seek(0)
    f.truncate()
    f.write(str(v))
    f.flush()


def _read(f):
    log.debug("Reading: {0}".format(f))
    f.seek(0)
    return f.read().strip()


def _verify(function):
    """decorator to ensure pin is properly set up"""
    # @functools.wraps
    def wrapped(pin, *args, **kwargs):
        pin = int(pin)
        if pin not in _open:
            ppath = gpiopath(pin)
            try:
                value = open(pjoin(ppath, 'value'), FMODE)
            except IOError:
                log.debug("Creating Pin {0}".format(pin))
                os.makedirs(ppath)
                value = open(pjoin(ppath, 'value'), FMODE)
            direction = open(pjoin(ppath, 'direction'), FMODE)
            _open[pin] = PinState(value=value, direction=direction)
        return function(pin, *args, **kwargs)
    return wrapped


def cleanup(pin=None, assert_exists=False):
    """Cleanup the pin by closing and unexporting it.

    Args:
        pin (int, optional): either the pin to clean up or None (default).
            If None, clean up all pins.
        assert_exists: if True, raise a ValueError if the pin was not setup.
            Otherwise, this function is a NOOP.
    """
    if pin is None:
        # Take a list of keys because we will be deleting from _open
        for pin in list(_open):
            cleanup(pin)
        return
    if not isinstance(pin, int):
        raise TypeError("pin must be an int, got: {}".format(pin))

    state = _open.get(pin)
    if state is None:
        if assert_exists:
            raise ValueError("pin {} was not setup".format(pin))
        return
    state.value.close()
    state.direction.close()
    if os.path.exists(gpiopath(pin)):
        log.debug("Unexporting pin {0}".format(pin))
        with _export_lock:
            with open(pjoin(gpio_root, 'unexport'), 'w') as f:
                _write(f, pin)
    del _open[pin]


@_verify
def setup(pin, mode, pullup=None, initial=False):
    '''Setup pin with mode IN or OUT.

    Args:
        pin (int):
        mode (str): use either gpio.OUT or gpio.IN
        pullup (None): rpio compatibility. If anything but None, raises
            ValueError.
        initial (bool, optional): Initial pin value. Default is False.
    '''
    if pullup is not None:
        raise ValueError("sysfs does not support pullups")

    if mode not in (IN, OUT, LOW, HIGH):
        raise ValueError(mode)

    log.debug("Setup {0}: {1}".format(pin, mode))
    f = _open[pin].direction
    _write(f, mode)
    if mode == OUT:
        if initial:
            set(pin, 1)
        else:
            set(pin, 0)


def mode(pin):
    '''get the pin mode

    Returns:
        str: "in" or "out"
    '''
    pin = int(pin)
    ppath = gpiopath(pin)
    with open(pjoin(ppath, 'direction'), 'r') as f:
        f.seek(0)
        return f.read().strip()


def read(pin):
    '''read the pin value

    Returns:
        int: 0 or 1
    '''
    pin = int(pin)
    ppath = gpiopath(pin)
    with open(pjoin(ppath, 'value'), 'r') as f:
        f.seek(0)
        return int(f.read().strip())


@_verify
def set(pin, value):
    '''set the pin value to 0 or 1'''
    if value is LOW:
        value = 0
    value = int(bool(value))
    log.debug("Write {0}: {1}".format(pin, value))
    f = _open[pin].value
    _write(f, value)


def input(pin):
    '''read the pin. Same as read'''
    return read(pin)


@_verify
def output(pin, value):
    '''set the pin. Same as set'''
    return set(pin, value)


def setwarnings(value):
    '''exists for rpio compatibility'''
    pass


def setmode(value):
    '''exists for rpio compatibility'''
    pass


BCM = None  # rpio compatibility
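A brief usage sketch for the sysfs wrapper above; the pin number is an arbitrary example and the module name is assumed, not taken from the source.

import gpio  # hypothetical module name for the file above

PIN = 23  # arbitrary example pin
gpio.setup(PIN, gpio.OUT)  # creates/exports the pin under gpio_root, sets direction
gpio.set(PIN, 1)           # drive the pin high
print(gpio.read(PIN))      # read back the value: 1
gpio.cleanup(PIN)          # close file handles and unexport the pin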
import hashlib
import json

from django.contrib.gis.db import models

from bims.models.location_site import (
    location_site_post_save_handler,
    LocationSite
)
from bims.models.spatial_scale import SpatialScale
from bims.models.spatial_scale_group import SpatialScaleGroup
from bims.utils.logger import log


def array_to_dict(array, key_name='key'):
    dictionary = {}
    for data_dict in array:
        for data_key, data_value in data_dict.iteritems():
            if isinstance(data_value, list):
                formatted_dict = array_to_dict(data_value)
                if formatted_dict:
                    data_dict[data_key] = formatted_dict
            elif data_value:
                if isinstance(data_value, float):
                    continue
                if data_value.isdigit():
                    data_dict[data_key] = int(data_value)
                else:
                    try:
                        data_dict[data_key] = float(data_value)
                    except ValueError:
                        continue
            else:
                continue
        try:
            dictionary[data_dict[key_name]] = data_dict
        except KeyError:
            continue
    return dictionary


def process_spatial_scale_data(location_context_data, group=None):
    for context_group_value in location_context_data:
        try:
            context_group = location_context_data[context_group_value]
        except TypeError:
            return
        if 'value' in context_group:
            if not context_group['value']:
                continue
            spatial_type = 'select'
            spatial_query = context_group['value']
            spatial_scale_group, created = (
                SpatialScaleGroup.objects.get_or_create(
                    key=context_group['key'],
                    name=context_group['name'],
                    parent=group
                ))
            try:
                SpatialScale.objects.get_or_create(
                    group=spatial_scale_group,
                    key=context_group['key'],
                    name=context_group['name'],
                    type=spatial_type,
                    query=spatial_query
                )
            except SpatialScale.MultipleObjectsReturned:
                # shouldn't happen; keep the first record, delete duplicates
                spatial_scales = SpatialScale.objects.filter(
                    group=spatial_scale_group,
                    key=context_group['key'],
                    name=context_group['name'],
                    type=spatial_type,
                    query=spatial_query
                )
                SpatialScale.objects.filter(
                    id__in=spatial_scales.values_list('id', flat=True)[1:]
                ).delete()
        else:
            spatial_scale_group, created = (
                SpatialScaleGroup.objects.get_or_create(
                    key=context_group['key'],
                    name=context_group['name'],
                    parent=group
                ))
            if 'service_registry_values' in context_group:
                process_spatial_scale_data(
                    context_group['service_registry_values'],
                    group=spatial_scale_group
                )


def format_location_context(location_site_id, force_update=False):
    try:
        location_site = LocationSite.objects.get(
            id=location_site_id
        )
    except LocationSite.DoesNotExist:
        log('LocationSite Does Not Exist', 'debug')
        return
    if not location_site.location_context_document:
        log('LocationSite context document does not exist', 'debug')
        return
    location_context = json.loads(location_site.location_context_document)
    hash_string = hashlib.md5(
        location_site.location_context_document
    ).hexdigest()
    formatted = {}

    if location_site.location_context and not force_update:
        formatted_location_context = json.loads(
            location_site.location_context
        )
        if not location_site.original_geomorphological:
            try:
                context_geo = formatted_location_context[
                    'context_group_values'][
                    'geomorphological_group']['service_registry_values'][
                    'geo_class_recoded']['value']
                models.signals.post_save.disconnect(
                    location_site_post_save_handler,
                )
                location_site.original_geomorphological = context_geo
                location_site.save()
                models.signals.post_save.connect(
                    location_site_post_save_handler,
                )
            except (KeyError, TypeError):
                pass
        if 'hash' in formatted_location_context:
            if formatted_location_context['hash'] == hash_string:
                process_spatial_scale_data(
                    formatted_location_context['context_group_values']
                )
                if location_site.refined_geomorphological:
                    # Update geo value in geocontext data
                    try:
                        context_geo = formatted_location_context[
                            'context_group_values'][
                            'geomorphological_group'][
                            'service_registry_values'][
                            'geo_class_recoded']['value']
                        if (
                                context_geo ==
                                location_site.refined_geomorphological):
                            log('Formatted location context already exists')
                            return
                    except (KeyError, TypeError):
                        log('Formatted location context already exists')
                        return
                else:
                    log('Formatted location context already exists')
                    return

    if not isinstance(location_context, dict):
        return

    for context_key, context_value in location_context.iteritems():
        if isinstance(context_value, list):
            formatted[context_key] = array_to_dict(
                context_value, key_name='key')
        else:
            formatted[context_key] = context_value

    models.signals.post_save.disconnect(
        location_site_post_save_handler,
    )

    if not location_site.original_geomorphological:
        try:
            context_geo = formatted[
                'context_group_values'][
                'geomorphological_group']['service_registry_values'][
                'geo_class_recoded']['value']
            location_site.original_geomorphological = context_geo
        except (KeyError, TypeError):
            pass

    if location_site.refined_geomorphological:
        try:
            formatted['context_group_values'][
                'geomorphological_group']['service_registry_values'][
                'geo_class_recoded']['value'] = (
                location_site.refined_geomorphological
            )
        except (KeyError, TypeError):
            if (
                    'geomorphological_group' not in
                    formatted['context_group_values']
            ):
                formatted['context_group_values']['geomorphological_group'] = {
                    'key': 'geomorphological_group',
                    'name': 'Geomorphological zones',
                    'service_registry_values': {}
                }
            if (
                    'service_registry_values' not in
                    formatted['context_group_values']
                    ['geomorphological_group']
            ):
                # note: the original indexed `formatted` directly here, which
                # would raise a KeyError; the intent is clearly the nested dict
                formatted['context_group_values'][
                    'geomorphological_group'][
                    'service_registry_values'] = {
                    'geo_class_recoded': {}
                }
            if (
                    'geo_class_recoded' not in
                    formatted['context_group_values']
                    ['geomorphological_group']
                    ['service_registry_values']
            ):
                formatted['context_group_values'][
                    'geomorphological_group']['service_registry_values'] = {
                    'geo_class_recoded': {
                        'name': 'Geomorphological zones',
                        'key': 'geo_class_recoded',
                        'value': location_site.refined_geomorphological
                    }
                }

    process_spatial_scale_data(
        formatted['context_group_values']
    )
    formatted['hash'] = hash_string
    location_site.location_context = formatted
    location_site.save()
    log('Location context formatted', 'info')

    models.signals.post_save.connect(
        location_site_post_save_handler,
    )
import IsoSpecPy
from tqdm import tqdm

t = 0.0
for x in tqdm(xrange(100000)):
    i = IsoSpecPy.Iso("C100H100N100O100")
    t += i.getTheoreticalAverageMass()

print t
import enum
from flask import session, url_for, redirect
import re
import datetime


def loggedIn():
    return session.get("loggedIn", False)


def notLoggedInRedir():
    if not loggedIn():
        print("notLoggedIn")
        return redirect(url_for("site_login"))
    else:
        print("loggedIn")
        return True


def login(userId):
    session["loggedIn"] = True
    session["userId"] = userId


def logout():
    session["loggedIn"] = False
    session.pop("userId")


def listToLoT(l):
    out = [(i, i) for i in l]
    return out


def addZerosTo(n, num):
    return "0" * (n - len(str(num))) + str(num)


def oneLineTable(*args):
    out = "<table><tr>"
    for i in args:
        out += f"<td>{i}</td>"
    out += "</tr></table>"
    return out


regex = (
    r'^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])'
    r'T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?'
    r'(Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$'
)
match_iso8601 = re.compile(regex).match


def validate_iso8601(str_val):
    try:
        if match_iso8601(str_val) is not None:
            return True
    except:
        pass
    return False


def datetime_val(string):
    try:
        datetime.datetime.fromisoformat(string)
        return True
    except:
        return False


def removeAtIndexes(l, index):
    out = []
    for i2, l2 in enumerate(l):
        out.append([])
        for i, item in enumerate(l2):
            if i != index:
                out[i2].append(item)
    return out
"""How to print readable statistics.""" import pprint from dd import cudd import humanize def main(): b = cudd.BDD() b.declare('x', 'y', 'z') u = b.add_expr('x & y & z') u = b.add_expr('x | y | ~ z') stats = b.statistics() pprint.pprint(format_dict(stats)) def format_dict(d): """Return `dict` with values readable by humans.""" return {k: format_number(v) for k, v in d.items()} def format_number(x): """Return readable string for `x`.""" if 0 < x and x < 1: return '{x:1.2}'.format(x=x) return humanize.intword(x) if __name__ == '__main__': main()
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2018-01-31 07:58
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('hkm', '0033_add_product_order_collection_hash'),
    ]

    operations = [
        migrations.AlterField(
            model_name='productorder',
            name='postal_code',
            field=models.CharField(null=True, verbose_name='Postal code', max_length=64),
        ),
    ]
import requests
import datetime
import logging
import re

token = {
    "bearer": None,
    "expiration": None
}

credentials = {
    "client_id": None,
    "username": None,
    "password": None,
    "tenant_id": None,
    "client_secret": None
}

log = logging.getLogger()
console = logging.StreamHandler()
console.setFormatter(logging.Formatter("%(asctime)s\t%(levelname)s -- %(message)s"))
log.addHandler(console)
log.setLevel(20)

HTTP_OK = 200
HTTP_ACCEPTED = 202


def connect(client_id: str, username: str, password: str, tenant_id: str = "common", client_secret: str = None) -> None:
    global token
    global credentials
    body = {
        "grant_type": "password",
        "resource": "https://analysis.windows.net/powerbi/api",
        "client_id": client_id,
        "username": username,
        "password": password
    }
    if client_secret:
        body["client_secret"] = client_secret
    headers = {"Content-Type": "application/x-www-form-urlencoded"}
    response = requests.post("https://login.microsoftonline.com/{}/oauth2/token".format(tenant_id), headers=headers, data=body)
    if response.status_code == HTTP_OK:
        set_credentials(client_id, username, password, tenant_id, client_secret)
        set_token(response.json()["access_token"])
        log.info("Connected to the Power BI REST API with {}".format(username))
    else:
        set_credentials(None, None, None, None, None)
        set_token(None)
        log.error("Error {} -- Something went wrong when trying to retrieve the token from the REST API".format(response.status_code))


def verify_token() -> bool:
    global token
    if token["bearer"] is None:
        log.error("Error 401 -- Please connect to the Power BI REST API with the connect() function before")
        return False
    if token["expiration"] < datetime.datetime.now():
        connect(credentials["client_id"], credentials["username"], credentials["password"], credentials["tenant_id"], credentials["client_secret"])
    return True


def get_token() -> dict:
    global token
    return token


def set_token(bearer: str) -> None:
    global token
    token["bearer"] = "Bearer {}".format(bearer)
    token["expiration"] = datetime.datetime.now() + datetime.timedelta(hours=1)


def set_credentials(client_id: str, username: str, password: str, tenant_id: str, client_secret: str) -> None:
    global credentials
    credentials["client_id"] = client_id
    credentials["username"] = username
    credentials["password"] = password
    credentials["tenant_id"] = tenant_id
    credentials["client_secret"] = client_secret


# Workspace

def get_workspaces() -> list:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.get("https://api.powerbi.com/v1.0/myorg/groups", headers=headers)
    if response.status_code == HTTP_OK:
        return response.json()["value"]
    else:
        log.error("Error {} -- Something went wrong when trying to retrieve the list of workspaces you have access to".format(response.status_code))
        return None


def get_workspace(workspace_id: str) -> list:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.get("https://api.powerbi.com/v1.0/myorg/groups", headers=headers)
    if response.status_code == HTTP_OK:
        ws = [result for result in response.json()["value"] if result["id"] == workspace_id]
        if len(ws) > 0:
            return ws[0]
        else:
            return None
    else:
        log.error("Error {} -- Something went wrong when trying to retrieve the workspace {}".format(response.status_code, workspace_id))
        return None


def create_workspace(workspace_name: str, new: bool = False) -> dict:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    body = {"name": workspace_name}
    if new:
        response = requests.post("https://api.powerbi.com/v1.0/myorg/groups?workspaceV2=True", headers=headers, data=body)
        if response.status_code == HTTP_OK:
            result = response.json()
            return {
                "id": result["id"],
                "isOnDedicatedCapacity": result["isOnDedicatedCapacity"],
                "name": result["name"]
            }
        else:
            log.error("Error {} -- Something went wrong when trying to create a new workspace V2 called {}".format(response.status_code, workspace_name))
            return None
    else:
        response = requests.post("https://api.powerbi.com/v1.0/myorg/groups", headers=headers, data=body)
        if response.status_code == HTTP_OK:
            result = response.json()
            return {
                "id": result["id"],
                "isReadOnly": result["isReadOnly"],
                "isOnDedicatedCapacity": result["isOnDedicatedCapacity"],
                "name": result["name"]
            }
        else:
            log.error("Error {} -- Something went wrong when trying to create a new workspace called {}".format(response.status_code, workspace_name))
            return None


def delete_workspace(workspace_id: str) -> dict:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.delete("https://api.powerbi.com/v1.0/myorg/groups/{}".format(workspace_id), headers=headers)
    if response.status_code == HTTP_OK:
        return {"response": response.status_code}
    else:
        log.error("Error {} -- Something went wrong when trying to delete the workspace {}".format(response.status_code, workspace_id))
        return None


def get_users_in_workspace(workspace_id: str) -> list:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/users".format(workspace_id), headers=headers)
    if response.status_code == HTTP_OK:
        return response.json()["value"]
    else:
        log.error("Error {} -- Something went wrong when trying to retrieve the list of users in the workspace {}".format(response.status_code, workspace_id))
        return None


def add_user_to_workspace(workspace_id: str, email: str, access: str = "Member") -> dict:
    global token
    if not verify_token():
        return None
    if access in ["Admin", "Contributor", "Member"]:
        headers = {"Authorization": token["bearer"]}
        body = {"userEmailAddress": email, "groupUserAccessRight": access}
        response = requests.post("https://api.powerbi.com/v1.0/myorg/groups/{}/users".format(workspace_id), headers=headers, data=body)
        if response.status_code == HTTP_OK:
            return {"response": response.status_code}
        else:
            log.error("Error {} -- Something went wrong when trying to add {} in the workspace {}".format(response.status_code, email, workspace_id))
            return None
    else:
        log.error("Error 400 -- Please, make sure the access parameter is either \"Admin\", \"Contributor\" or \"Member\"")
        return None


def delete_user_from_workspace(workspace_id: str, email: str) -> dict:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.delete("https://api.powerbi.com/v1.0/myorg/groups/{}/users/{}".format(workspace_id, email), headers=headers)
    if response.status_code == HTTP_OK:
        return {"response": response.status_code}
    else:
        log.error("Error {} -- Something went wrong when trying to delete the user {} from the workspace {}".format(response.status_code, email, workspace_id))
        return None


def update_user_in_workspace(workspace_id: str, email: str, access: str = "Member") -> dict:
    global token
    if not verify_token():
        return None
    if access in ["Admin", "Contributor", "Member"]:
        headers = {"Authorization": token["bearer"]}
        body = {"userEmailAddress": email, "groupUserAccessRight": access}
        response = requests.put("https://api.powerbi.com/v1.0/myorg/groups/{}/users".format(workspace_id), headers=headers, data=body)
        if response.status_code == HTTP_OK:
            return {"response": response.status_code}
        else:
            log.error("Error {} -- Something went wrong when trying to update {} in the workspace {}".format(response.status_code, email, workspace_id))
            return None
    else:
        log.error("Error 400 -- Please, make sure the access parameter is either \"Admin\", \"Contributor\" or \"Member\"")
        return None


# Report

def get_reports(workspace_id: str) -> list:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/reports".format(workspace_id), headers=headers)
    if response.status_code == HTTP_OK:
        return response.json()["value"]
    else:
        log.error("Error {} -- Something went wrong when trying to retrieve the list of reports in the workspace {}".format(response.status_code, workspace_id))
        return None


def get_report(workspace_id: str, report_id: str) -> list:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/reports/{}".format(workspace_id, report_id), headers=headers)
    if response.status_code == HTTP_OK:
        return response.json()
    else:
        log.error("Error {} -- Something went wrong when trying to retrieve the report {} in the workspace {}".format(response.status_code, report_id, workspace_id))
        return None


def delete_report(workspace_id: str, report_id: str) -> dict:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.delete("https://api.powerbi.com/v1.0/myorg/groups/{}/reports/{}".format(workspace_id, report_id), headers=headers)
    if response.status_code == HTTP_OK:
        return {"response": response.status_code}
    else:
        log.error("Error {} -- Something went wrong when trying to delete the report {} in the workspace {}".format(response.status_code, report_id, workspace_id))
        return None


def export_report(workspace_id: str, report_id: str, out_file: str) -> dict:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/reports/{}/export".format(workspace_id, report_id), headers=headers)
    if response.status_code == HTTP_OK:
        with open(out_file, "wb") as file:
            file.write(response.content)
        return {"response": response.status_code}
    else:
        log.error("Error {} -- Something went wrong when trying to export the report {} in the workspace {}".format(response.status_code, report_id, workspace_id))
        return None


def import_report(workspace_id: str, report_name: str, in_file: str, name_conflict: str = "CreateOrOverwrite") -> dict:
    global token
    if not verify_token():
        return None
    if name_conflict in ["CreateOrOverwrite", "GenerateUniqueName", "Ignore", "Overwrite"]:
        headers = {"Authorization": token["bearer"], "Content-Type": "multipart/form-data"}
        file = {"file": open(in_file, "rb")}
        response = requests.post("https://api.powerbi.com/v1.0/myorg/groups/{}/imports?datasetDisplayName={}&nameConflict={}".format(workspace_id, report_name, name_conflict), headers=headers, files=file)
        if response.status_code == HTTP_ACCEPTED:
            return response.json()
        else:
            log.error("Error {} -- Something went wrong when trying to import the report {} in the workspace {}".format(response.status_code, in_file, workspace_id))
            return None
    else:
        log.error("Error 400 -- Please, make sure the name_conflict parameter is either \"CreateOrOverwrite\", \"GenerateUniqueName\", \"Ignore\" or \"Overwrite\"")
        return None


def clone_report(workspace_id: str, report_id: str, dest_report_name: str, dest_workspace_id: str = None) -> dict:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    if dest_workspace_id:
        body = {"name": dest_report_name, "targetWorkspaceId": dest_workspace_id}
    else:
        body = {"name": dest_report_name}
    response = requests.post("https://api.powerbi.com/v1.0/myorg/groups/{}/reports/{}/clone".format(workspace_id, report_id), headers=headers, data=body)
    if response.status_code == HTTP_OK:
        return {"response": response.status_code}
    else:
        log.error("Error {} -- Something went wrong when trying to clone the report {} in the workspace {}".format(response.status_code, report_id, workspace_id))
        return None


# Dataset

def get_datasets(workspace_id: str) -> list:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/datasets".format(workspace_id), headers=headers)
    if response.status_code == HTTP_OK:
        return response.json()["value"]
    else:
        log.error("Error {} -- Something went wrong when trying to retrieve the list of datasets in the workspace {}".format(response.status_code, workspace_id))
        return None


def get_dataset(workspace_id: str, dataset_id: str) -> list:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/datasets/{}".format(workspace_id, dataset_id), headers=headers)
    if response.status_code == HTTP_OK:
        return response.json()["value"]
    else:
        log.error("Error {} -- Something went wrong when trying to retrieve the dataset {} in the workspace {}".format(response.status_code, dataset_id, workspace_id))
        return None


def delete_dataset(workspace_id: str, dataset_id: str) -> dict:
    global token
    if not verify_token():
        return None
    headers = {"Authorization": token["bearer"]}
    response = requests.delete("https://api.powerbi.com/v1.0/myorg/groups/{}/datasets/{}".format(workspace_id, dataset_id), headers=headers)
    if response.status_code == HTTP_OK:
        return {"response": response.status_code}
    else:
        log.error("Error {} -- Something went wrong when trying to delete the dataset {} in the workspace {}".format(response.status_code, dataset_id, workspace_id))
        return None


def refresh_dataset(workspace_id: str, dataset_id: str, notify_option: str = "NoNotification") -> dict:
    global token
    if not verify_token():
        return None
    if notify_option in ["MailOnCompletion", "MailOnFailure", "NoNotification"]:
        headers = {"Authorization": token["bearer"]}
        body = {"notifyOption": notify_option}
        response = requests.post("https://api.powerbi.com/v1.0/myorg/groups/{}/datasets/{}/refreshes".format(workspace_id, dataset_id), headers=headers, data=body)
        if response.status_code == HTTP_ACCEPTED:
            return {"response": response.status_code}
        else:
            log.error("Error {} -- Something went wrong when trying to refresh the dataset {} in the workspace {}".format(response.status_code, dataset_id, workspace_id))
            return None
    else:
        log.error("Error 400 -- Please, make sure the notify_option parameter is either \"MailOnCompletion\", \"MailOnFailure\" or \"NoNotification\"")
        return None


# Admin

def get_audit_logs(start_date: str, end_date: str, activity: str = None, user_id: str = None) -> list:
    global token
    if not verify_token():
        return None
    date_regex = r"^\d\d\d\d-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) (00|1[0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])$"
    start_date_verification = re.search(date_regex, start_date)
    end_date_verification = re.search(date_regex, end_date)
    if start_date_verification and end_date_verification:
        start_date_value = datetime.datetime.strptime(start_date, "%Y-%m-%d %H:%M:%S").strftime("%Y-%m-%dT%H:%M:%S.000Z")
        end_date_value = datetime.datetime.strptime(end_date, "%Y-%m-%d %H:%M:%S").strftime("%Y-%m-%dT%H:%M:%S.000Z")
        headers = {"Authorization": token["bearer"]}
        params = ""
        if activity:
            params += "Activity eq '{}'".format(activity)
        if user_id:
            if params != "":
                params += " and "
            params += "UserId eq '{}'".format(user_id)
        if params == "":
            url = "https://api.powerbi.com/v1.0/myorg/admin/activityevents?startDateTime='{}'&endDateTime='{}'".format(start_date_value, end_date_value)
        else:
            url = "https://api.powerbi.com/v1.0/myorg/admin/activityevents?startDateTime='{}'&endDateTime='{}'&$filter={}".format(start_date_value, end_date_value, params)
        response = requests.get(url, headers=headers)
        if response.status_code == HTTP_OK:
            logs = []
            while response.json()["continuationUri"] is not None:
                logs += response.json()["activityEventEntities"]
                response = requests.get(response.json()["continuationUri"], headers=headers)
                if response.status_code != HTTP_OK:
                    log.error("Error {} -- Something went wrong when trying to retrieve audit logs from {} to {}".format(response.status_code, start_date, end_date))
                    return None
            return logs
        else:
            log.error("Error {} -- Something went wrong when trying to retrieve audit logs from {} to {}".format(response.status_code, start_date, end_date))
            print(response.json())
            return None
    else:
        log.error("Error 400 -- Please, make sure the dates you gave match the following pattern: YYYY-MM-DD HH:MM:SS")
        return None
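A minimal usage sketch for the Power BI wrapper above; the module name, client id, and account are hypothetical placeholders, not values from the source.

import powerbi  # hypothetical module name for the file above

powerbi.connect(
    client_id="00000000-0000-0000-0000-000000000000",  # placeholder Azure AD app id
    username="user@example.com",                        # placeholder account
    password="********",
)
workspaces = powerbi.get_workspaces()
if workspaces is not None:
    for ws in workspaces:
        print(ws["id"], ws["name"])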
import logging

from margaritashotgun.client import Client

__author__ = 'Joel Ferrier'
__version__ = '0.2.0'


def set_stream_logger(name='margaritashotgun', level=logging.INFO,
                      format_string=None):
    """
    Add a stream handler for the provided name and level to the logging module.

    >>> import margaritashotgun
    >>> margaritashotgun.set_stream_logger('marsho', logging.DEBUG)

    :type name: string
    :param name: Log name
    :type level: int
    :param level: Logging level
    :type format_string: str
    :param format_string: Log message format
    """
    if format_string is None:
        format_string = "%(asctime)s %(name)s [%(levelname)s] %(message)s"

    logger = logging.getLogger(name)
    logger.setLevel(level)
    handler = logging.StreamHandler()
    handler.setLevel(level)
    formatter = logging.Formatter(format_string)
    handler.setFormatter(formatter)
    logger.addHandler(handler)


def client(*args, **kwargs):
    """
    Creates a client to orchestrate LiME memory capture

    See :py:meth:`margaritashotgun.client.Client`
    """
    return Client(*args, **kwargs)


class NullHandler(logging.Handler):
    def emit(self, record):
        pass


logging.getLogger('margaritashotgun').addHandler(NullHandler())
import AST as Tree
from TopCompiler import Types
from TopCompiler import Parser
from TopCompiler import Error
from TopCompiler import VarParser
from TopCompiler import Scope
from TopCompiler import ExprParser
import collections as coll
from TopCompiler import Struct


def parseLens(parser):
    # parser.nextToken()
    Scope.incrScope(parser)
    # lensType = Types.parseType(parser)
    Scope.decrScope(parser)

    place = Tree.Place(parser)
    lens = Tree.Lens(parser)
    lens.place = place

    parser.currentNode.addNode(lens)
    parser.currentNode = lens
    lens.addNode(place)

    # parser.nextToken()
    while not Parser.isEnd(parser):
        parser.nextToken()
        if parser.thisToken().token == "$":
            Struct.index(parser, unary=False)
            parser.currentNode.nodes[-1].pattern = True
        else:
            Parser.callToken(parser)

    ExprParser.endExpr(parser)
    parser.currentNode = lens.owner


def typeCheckLens(parser, lens):
    global B  # hack
    global maybe
    B = Types.T("B", Types.All, "Lens")
    oB = B
    maybe = False

    def loop(n, typ):
        global B  # hack
        global maybe
        if type(n) is Tree.Field:
            if n.pattern:
                t = Scope.typeOfVar(n, parser, parser.package, n.field)
                if t.isType(Types.Enum):
                    enum = t
                elif t.isType(Types.FuncPointer) and t.returnType.isType(Types.Enum):
                    enum = t.returnType
                else:
                    n.error("var " + n.field + " is not one of the cases of an enum")

                replaceGen = {}
                for i in enum.remainingGen:
                    replaceGen[i] = typ

                if not n.field in enum.const:
                    n.error(n.field + " not a case of the enum " + str(enum))
                else:
                    n.enum = enum
                    case = enum.const[n.field]
                    before_enum = enum
                    enum = Types.replaceT(enum, replaceGen)
                    n.const = enum.const
                    if len(case) == 1:
                        r = case[0]
                    elif len(case) == 0:
                        n.error("This case of enum takes 0 values, thus cannot be used in lens")
                    else:
                        r = Types.Tuple(case)
                    maybe = True
                    B = r
                    return loop(n.nodes[0], enum)
            else:
                ob = B  # have to check if B changed after loop was called
                res = loop(n.nodes[0], Types.Interface(False, {n.field: typ}))
                if not B is ob:
                    if n.field in B.types:
                        B = B.types[n.field]
                    else:
                        meth = B.getMethod(parser, n.field)
                        if meth:
                            B = meth
                        else:
                            n.error("Type " + str(B) + ", has no field " + n.field)
                return res
        elif type(n) is Tree.ArrRead:
            return loop(n.nodes[0], Types.Array(False, typ))
        elif type(n) in [Tree.Place, Tree.PlaceHolder]:
            return typ
        else:
            n.error("unexpected token " + n.token.token)

    lens_typ = loop(lens.nodes[0], B)
    lens.maybe = maybe

    A = Types.T("A", Types.All, "Lens")
    oB = B
    if maybe:
        B = Types.replaceT(parser.Maybe, {"Maybe.T": oB})
        name = "MaybeLens"
        if lens_typ.isType(Types.Enum):
            A = lens_typ
        else:
            A.owner = name
        B.owner = name
    else:
        B = B
        name = "Lens"
    A.owner = name

    # originalB = Types.T("B", , "Lens")

    Lens = Types.replaceT(Types.Interface(False, {
        "query": Types.FuncPointer([A], B),
        "set": Types.FuncPointer([A, oB], A),
        "toString": Types.FuncPointer([], Types.String(0)),
    }, coll.OrderedDict([(name + ".A", A), (name + ".B", oB)]), name=name),
        {"Lens.A": Types.T("A", lens_typ, name)})

    # lens.type = Types.Interface(False, {
    #     "query": Types.FuncPointer([i.lensType], i.nodes[0].type),
    #     "set": Types.FuncPointer([i.lensType, i.nodes[0].type], i.lensType),
    # })

    lens.type = Lens


Parser.exprToken["lens"] = parseLens
Parser.exprToken["$"] = lambda parser: Error.parseError(parser, "Unexpected token $")
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
import re


class ContactForm(forms.Form):

    def clean_phone(self):
        phone = self.cleaned_data.get('phone')
        phone_return = ''
        for letter in phone:
            if letter in '0123456789':
                phone_return += letter
        if 7 <= len(phone_return) <= 12:
            return phone_return
        else:
            raise ValidationError(_('The phone number must contain 7 to 12 digits'))

    first_name = forms.CharField(
        label=_('First name'),
        max_length=100,
        required=True,
    )
    last_name = forms.CharField(
        label=_('Last name'),
        max_length=100,
        required=True,
    )
    email = forms.EmailField(
        label='Email',
        required=True,
        max_length=100,
        min_length=5,
        help_text=_('Enter a valid email address'),
        widget=forms.TextInput(
            attrs={'placeholder': 'you@example.com', 'type': 'email'}
        )
    )
    phone = forms.CharField(
        label=_('Phone'),
        required=False,
        max_length=25,
        min_length=5,
        help_text=_('7 to 25 characters. Digits, parentheses (), spaces and the + and - signs are allowed. May be left blank.'),
        widget=forms.TextInput(
            attrs={'placeholder': '+49 (303) 123-4567',
                   'type': 'tel',
                   'pattern': '[0-9\-\(\)\+\s]{7,}'}
        )
    )
    address = forms.CharField(
        label=_('Postal address'),
        required=False,
        max_length=250,
        widget=forms.TextInput(
            attrs={'placeholder': 'Pariser Platz 1, 10000 Berlin'})
    )
    message = forms.CharField(
        label=_('Message'),
        required=False,
        widget=forms.Textarea(attrs={'rows': '4'})
    )
import os
import argparse

import torch
import yaml

from model.model import parsingNet
from utils.common import merge_config
from utils.dist_utils import dist_print
from evaluation.eval_wrapper import eval_lane


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--params", type=str, default='configs/culane.yaml')
    return parser.parse_args()


if __name__ == "__main__":
    torch.backends.cudnn.benchmark = True

    args = parse_args()
    with open(args.params) as f:
        cfg = yaml.load(f, Loader=yaml.FullLoader)  # data dict

    distributed = False
    if 'WORLD_SIZE' in os.environ:
        distributed = int(os.environ['WORLD_SIZE']) > 1

    if distributed:
        torch.cuda.set_device(cfg['local_rank'])
        torch.distributed.init_process_group(backend='nccl', init_method='env://')
    dist_print('start testing...')

    assert cfg['network']['backbone'] in ['18', '34', '50', '101', '152', '50next', '101next', '50wide', '101wide']

    if cfg['dataset']['name'] == 'CULane':
        cls_num_per_lane = 18
    elif cfg['dataset']['name'] == 'Tusimple':
        cls_num_per_lane = 56
    else:
        raise NotImplementedError

    net = parsingNet(network=cfg['network'], datasets=cfg['dataset']).cuda()

    # strip the 'module.' prefix left over from DataParallel checkpoints
    state_dict = torch.load(cfg['test']['test_model'], map_location='cpu')['model']
    compatible_state_dict = {}
    for k, v in state_dict.items():
        if 'module.' in k:
            compatible_state_dict[k[7:]] = v
        else:
            compatible_state_dict[k] = v

    net.load_state_dict(compatible_state_dict, strict=False)

    # if distributed:
    #     net = torch.nn.parallel.DistributedDataParallel(net, device_ids=[args.local_rank])

    if not os.path.exists(cfg['test']['test_work_dir']):
        os.mkdir(cfg['test']['test_work_dir'])

    dataset = cfg['dataset']['name']
    data_root = cfg['dataset']['data_root']
    test_work_dir = cfg['test']['test_work_dir']
    griding_num = cfg['dataset']['griding_num']

    eval_lane(net, dataset, data_root, test_work_dir, griding_num, True, distributed)
'''
@language: python
@Description: Deprecated. Just for test.
@version: beta
@Author: xiaoshuyui
@Date: 2020-06-12 10:21:59
@LastEditors: xiaoshuyui
@LastEditTime: 2020-10-20 09:44:02
'''
import sys

sys.path.append("..")

from convertmask.utils.methods import getMultiShapes

if __name__ == "__main__":
    getMultiShapes.test()
from setuptools import setup

setup(
    name='ez_yaml',
    version='1.1.0',
    description="Straightforward wrapper around Ruamel Yaml",
    url='https://github.com/jeff-hykin/ez_yaml',
    author='Jeff Hykin',
    author_email='jeff.hykin@gmail.com',
    license='MIT',
    packages=['ez_yaml'],
    install_requires=[
        "ruamel.yaml"
    ],
    test_suite='tests',
    classifiers=[
        # 'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Programming Language :: Python',
    ]
)
# -*- coding: UTF-8 -*-
from __future__ import print_function


class Fibonacci(object):
    def __init__(self):
        self.a = 0
        self.b = 1

    def fib_iterative(self, n):
        self.a = 0
        self.b = 1
        for _ in range(n):
            self.a, self.b = self.b, self.a + self.b
        return self.a

    def fib_recursive(self, n):
        if n == 0 or n == 1:
            return n
        else:
            return self.fib_recursive(n - 1) + self.fib_recursive(n - 2)

    def fib_dynamic(self, n):
        cache = {}
        return self._fib_dynamic(n, cache)

    def _fib_dynamic(self, n, cache):
        if n == 0 or n == 1:
            return n
        if n in cache:
            return cache[n]
        cache[n] = self._fib_dynamic(n - 1, cache) + self._fib_dynamic(n - 2, cache)
        return cache[n]
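A quick cross-check of the three implementations above, added for illustration; it simply asserts that they agree on small inputs.

if __name__ == '__main__':
    fib = Fibonacci()
    for n in (0, 1, 2, 5, 10):
        # iterative, naive recursive, and memoized versions must agree
        assert fib.fib_iterative(n) == fib.fib_recursive(n) == fib.fib_dynamic(n)
    print(fib.fib_iterative(10))  # prints 55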
# ~/models/synapses/PCDCNnMFDCN2015aSudhakar/__init__.py
""" Example 3. Optimizing textures. """ import argparse import glob import os import subprocess import cv2 import numpy as np import torch import tqdm import neural_renderer_torch as neural_renderer class Model(torch.nn.Module): def __init__(self, filename_obj, filename_ref): super(Model, self).__init__() # load .obj vertices, faces = neural_renderer.load_obj(filename_obj) self.vertices = torch.as_tensor(vertices[None, :, :]) self.faces = torch.as_tensor(faces) # create textures vertices_t, faces_t, textures = neural_renderer.create_textures(self.faces.shape[0], texture_size=4) self.vertices_t = torch.as_tensor(vertices_t[None, :, :]) self.faces_t = torch.as_tensor(faces_t) self.textures = torch.nn.Parameter(torch.as_tensor(textures[None, :, :, :]), requires_grad=True) # load reference image self.image_ref = neural_renderer.imread(filename_ref) self.image_ref = torch.as_tensor(self.image_ref) # setup renderer renderer = neural_renderer.Renderer() renderer.perspective = False self.renderer = renderer def to(self, device=None): super(Model, self).to(device) self.faces = self.faces.to(device) self.vertices = self.vertices.to(device) self.vertices_t = self.vertices_t.to(device) self.faces_t = self.faces_t.to(device) self.image_ref = self.image_ref.to(device) def __call__(self, textures): self.renderer.viewpoints = neural_renderer.get_points_from_angles(2.732, 0, np.random.uniform(0, 360)) images = self.renderer.render_rgb( self.vertices, self.faces, self.vertices_t, self.faces_t, torch.tanh(textures)) loss = torch.sum((images[0] - self.image_ref.permute((2, 0, 1))) ** 2) return loss def make_gif(working_directory, filename): # generate gif (need ImageMagick) options = '-delay 8 -loop 0 -layers optimize' subprocess.call('convert %s %s/_tmp_*.png %s' % (options, working_directory, filename), shell=True) for filename in glob.glob('%s/_tmp_*.png' % working_directory): os.remove(filename) def run(): parser = argparse.ArgumentParser() parser.add_argument('-io', '--filename_obj', type=str, default='./examples_chainer/data/teapot.obj') parser.add_argument('-ir', '--filename_ref', type=str, default='./examples_chainer/data/example3_ref.png') parser.add_argument('-or', '--filename_output', type=str, default='./examples_chainer/data/example3_result.gif') parser.add_argument('-g', '--gpu', type=int, default=0) args = parser.parse_args() working_directory = os.path.dirname(args.filename_output) model = Model(args.filename_obj, args.filename_ref) model.to(args.gpu) optimizer = torch.optim.Adam(model.parameters(), lr=0.01) loop = tqdm.tqdm(range(300)) for _ in loop: loop.set_description('Optimizing') optimizer.zero_grad() loss = model(model.textures) loss.backward() optimizer.step() # draw object loop = tqdm.tqdm(range(0, 360, 4)) for num, azimuth in enumerate(loop): loop.set_description('Drawing') model.renderer.viewpoints = neural_renderer.get_points_from_angles(2.732, 0, azimuth) images = model.renderer.render_rgb( model.vertices, model.faces, model.vertices_t, model.faces_t, torch.tanh(model.textures)) image = images.cpu().detach().numpy()[0].transpose((1, 2, 0)) image = np.maximum(image, 0) * 255 # Crop negative values image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR) cv2.imwrite('%s/_tmp_%04d.png' % (working_directory, num), image) make_gif(working_directory, args.filename_output) if __name__ == '__main__': run()
# Generated by Django 4.0.1 on 2022-02-13 12:50

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Movie',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('poster', models.ImageField(upload_to='posters/')),
                ('director_name', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.TextField()),
                ('movie', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.movie')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
from ..helpers import IFPTestCase

from intficpy.thing_base import Thing


class TestDoff(IFPTestCase):
    def test_doff_player_not_wearing_gives_player_not_wearing_message(self):
        item = Thing(self.game, "item")
        item.moveTo(self.start_room)

        self.game.turnMain("doff item")

        self.assertIn(
            "aren't wearing",
            self.app.print_stack.pop(),
            "Did not receive expected 'not wearing' scope message",
        )
import concurrent.futures as future
import time

import numpy as np

n = 10


def f(x):
    time.sleep(0.2)
    return x * x


if __name__ == '__main__':
    print('**************************')
    print('******no parallel*********')
    start = time.time()
    for i in range(n):
        print('no parallel' + str(f(i)))
    end = time.time()
    print(f"Runtime of the program is {end - start} s")

    # *************Method 1************************
    print('***********************************')
    print('*********parallel method 1*********')
    start = time.time()
    with future.ProcessPoolExecutor() as executor:
        results = [executor.submit(f, i) for i in range(n)]
        for result0 in future.as_completed(results):
            print('parallel' + str(result0.result()))
    end = time.time()
    print(f"Runtime of the program is {end - start} s")

    # *************Method 2************************
    print('***********************************')
    print('*********parallel method 2*********')
    start = time.time()
    with future.ProcessPoolExecutor() as executor:
        results = executor.map(f, np.arange(n))
        for result0 in results:
            print('parallel' + str(result0))
    end = time.time()
    print(f"Runtime of the program is {end - start} s")
from django.contrib import admin

from .models import ModelResult


class ModelResultAdmin(admin.ModelAdmin):
    list_display = (
        "name",
        "owner",
        "benchmark",
        "model",
        "dataset",
        "results",
        "metadata",
        "approval_status",
        "approved_at",
        "created_at",
        "modified_at",
    )


admin.site.register(ModelResult, ModelResultAdmin)
# -*- coding: UTF-8 -*-
# Copyright 2017 Luc Saffre
# License: BSD (see file COPYING for details)

# $ python setup.py test -s tests.PackagesTests.test_packages

SETUP_INFO = dict(
    name='lino-vilma',
    version='18.8.0',
    install_requires=['lino_noi'],
    # tests_require=['pytest', 'mock'],
    test_suite='tests',
    description=("A Lino application for managing village contacts"),
    long_description="""\
.. image:: https://readthedocs.org/projects/lino/badge/?version=latest
    :alt: Documentation Status
    :target: http://lino.readthedocs.io/en/latest/?badge=latest

.. image:: https://coveralls.io/repos/github/lino-framework/noi/badge.svg?branch=master
    :target: https://coveralls.io/github/lino-framework/noi?branch=master

.. image:: https://travis-ci.org/lino-framework/noi.svg?branch=stable
    :target: https://travis-ci.org/lino-framework/noi?branch=stable

.. image:: https://img.shields.io/pypi/v/lino-noi.svg
    :target: https://pypi.python.org/pypi/lino-noi/

.. image:: https://img.shields.io/pypi/l/lino-noi.svg
    :target: https://pypi.python.org/pypi/lino-noi/

Lino Vilma is a customizable contact management system for villages.

- The central project homepage is http://vilma.lino-framework.org

- Technical documentation, including demo projects, API and tested specs:
  see http://www.lino-framework.org/specs/vilma

- For *introductions* and *commercial information* about Lino Vilma
  please see `www.saffre-rumma.net <http://www.saffre-rumma.net/noi/>`__.

""",
    author='Luc Saffre',
    author_email='luc@lino-framework.org',
    url="http://vilma.lino-framework.org",
    license='BSD License',
    classifiers="""\
Programming Language :: Python
Programming Language :: Python :: 2
Development Status :: 4 - Beta
Environment :: Web Environment
Framework :: Django
Intended Audience :: Developers
Intended Audience :: System Administrators
Intended Audience :: Information Technology
Intended Audience :: Customer Service
License :: OSI Approved :: BSD License
Operating System :: OS Independent
Topic :: Software Development :: Bug Tracking
""".splitlines())

SETUP_INFO.update(packages=[str(n) for n in """
lino_vilma
lino_vilma.lib
lino_vilma.lib.vilma
lino_vilma.lib.vilma.fixtures
lino_vilma.lib.contacts
lino_vilma.lib.contacts.fixtures
""".splitlines() if n])

SETUP_INFO.update(message_extractors={
    'lino_vilma': [
        ('**/cache/**', 'ignore', None),
        ('**.py', 'python', None),
        ('**.js', 'javascript', None),
        ('**/config/**.html', 'jinja2', None),
    ],
})

SETUP_INFO.update(include_package_data=True, zip_safe=False)

# SETUP_INFO.update(package_data=dict())

# def add_package_data(package, *patterns):
#     l = SETUP_INFO['package_data'].setdefault(package, [])
#     l.extend(patterns)
#     return l

# l = add_package_data('lino_noi.lib.noi')
# for lng in 'de fr'.split():
#     l.append('locale/%s/LC_MESSAGES/*.mo' % lng)
""" Various job content handler classes, grouped by the content's MIME type """ import typing import urllib.parse import bs4 from . import helper as _helper class BaseContentHandler: """ Base class for all variants of content handler classes A subclass should implement the ``analyze`` class method which is dedicated to actually handle the server's response. This method should only accept one argument, the download job itself. That ``DownloadJob`` object should have an attribute ``options`` which can be accessed like a mapping or a namespace. An implementation should document which attributes are required by it, but it should not assume that certain keys are present. The default classes have properly set defaults, though. Therefore, a subclass must provide reasonable default values for missing keys. The return value of that method should provide the complete and final content as it should be stored in the target file on disk. Note that the handler should not set the ``final_content`` attribute. Additionally, subclasses must set the class variable MIME_TYPE to indicate which mime types are support using ``accepts`` method. """ MIME_TYPE: typing.ClassVar[typing.List[str]] """List of MIME types that can be analyzed by the specific handler class""" @classmethod def accepts(cls, content_type: str) -> bool: """ Determine whether the given content MIME type is accepted by the handler """ return content_type.lower().split(";")[0].strip() in map( lambda s: s.lower(), cls.MIME_TYPE ) @classmethod def analyze(cls, job) -> typing.AnyStr: raise NotImplementedError @classmethod def _check_type(cls, job): """ Raise a TypeError if job is no ``DownloadJob`` instance """ # TODO: improve or remove type checking if not any(map(lambda c: c.__name__ == "DownloadJob", type(job).mro())): raise TypeError(f"Expected DownloadJob, got {type(job)}") class _DummyContentHandler(BaseContentHandler): """ Dummy content handler returning the exact content without modification A subclass must set the ``MIME_TYPES`` class attribute accordingly! 
""" @classmethod def analyze(cls, job) -> typing.AnyStr: cls._check_type(job) job.logger.debug(f"{cls.__name__} doesn't implement analyze yet...") return job.response.text class PlaintextContentHandler(_DummyContentHandler): MIME_TYPE = ["text/plain"] class HTMLContentHandler(BaseContentHandler): """ Handler class for HTML content """ MIME_TYPE = ["text/html"] @classmethod def analyze(cls, job) -> typing.AnyStr: """ Analyze and edit the job's content, extracting potential new targets Supported keys in the ``options`` storage: * ``ascii_only`` * ``load_hyperlinks`` * ``load_images`` * ``load_javascript`` * ``load_stylesheets`` * ``lowered_paths`` * ``pretty_html`` * ``rewrite_references`` :param job: the download job that should be handled and analyzed :return: the content of the file that should be written to disk """ # TODO: add/ensure support for non-default charsets (HTTP header field) def get_relative_path(ref: str) -> str: """ Get the relative path pointing from the current file towards ``ref`` :param ref: any kind of reference, but works best for absolute URLs (therefore, one should better make it an absolute URL before) :return: relative path pointing from the current file towards the reference """ path = urllib.parse.urlparse(ref).path if job.options.ascii_only: path = _helper.convert_to_ascii_only( path, job.options.ascii_conversion_table ) if job.options.lowered_paths: path = path.lower() if path.startswith("/"): path = path[1:] return path def handle_tag( tag_type: str, attr_name: str, filter_func: typing.Callable[[bs4.element.Tag], bool] ): """ Handle all tags of a specific type using one of its attributes This method extracts the URLs found in all tags of the specified type, provided the name of the attribute where the URL will be found is present as well. If rewriting of references had been enabled, this step will also be done in this method. Use the filter function to restrict the range of scanned and processed tags in the input file. :param tag_type: type of HTML tag (e.g. ``a`` or ``img``) :param attr_name: attribute name for that tag (e.g. ``href`` or ``src``) :param filter_func: function which accepts exactly one parameter, one single HTML tag, and determines whether this tag should be analyzed (filtering and processing of URLs takes place after this filter, so one doesn't need to care about e.g. 
schemes or other network locations) """ nonlocal job nonlocal soup for tag in soup.find_all(tag_type): if tag.has_attr(attr_name) and filter_func(tag): target = _helper.find_absolute_reference( tag.get(attr_name), job.netloc, job.remote_url, job.options.https_mode, base ) if target is not None: job.references.add(target) relative_path = get_relative_path(target) tag.attrs[attr_name] = relative_path def stylesheet_filter_func(tag: bs4.element.Tag) -> bool: """ Filter function for stylesheet tags only """ if tag.has_attr("rel"): is_css = "stylesheet" in tag.get("rel") enabled = not tag.has_attr("disabled") if is_css and enabled: return True return False cls._check_type(job) # Extract the document's base URI base = None soup = bs4.BeautifulSoup(job.response.text, features="html.parser") if soup.base is not None and soup.base.has_attr("href"): base = soup.base.get("href") if urllib.parse.urlparse(base).netloc == "": base = urllib.parse.urljoin(job.netloc, base) base = urllib.parse.urlparse(base) job.logger.debug(f"Base: {job}") # Remove all `base` tags while soup.base: job.logger.debug("Removing (one of) the `base` tag(s)") soup.base.replace_with("") # Handle the various types of references, if enabled if job.options.include_hyperlinks: handle_tag("a", "href", lambda x: True) if job.options.include_stylesheets: # TODO: add support for icons and scripts added by `link` tags handle_tag("link", "href", stylesheet_filter_func) if job.options.include_javascript: handle_tag("script", "src", lambda x: True) if job.options.include_images: handle_tag("img", "src", lambda x: True) # Determine the final content, based on the specified options if job.options.pretty_html: return soup.prettify() if job.options.rewrite_references: return soup.decode() return job.response.text class CSSContentHandler(_DummyContentHandler): MIME_TYPE = ["text/css"] class JavaScriptContentHandler(_DummyContentHandler): MIME_TYPE = ["application/javascript"] ALL_DEFAULT_HANDLER_CLASSES: typing.List[typing.Type[BaseContentHandler]] = [ PlaintextContentHandler, HTMLContentHandler, CSSContentHandler, JavaScriptContentHandler ]
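For illustration, a hedged sketch of how a caller might dispatch on ALL_DEFAULT_HANDLER_CLASSES using the accepts() method defined above; the Content-Type value is a made-up example and pick_handler is not part of the original module.

def pick_handler(content_type):
    # Return the first default handler class accepting the MIME type, if any;
    # accepts() already strips parameters such as "; charset=utf-8".
    for handler_cls in ALL_DEFAULT_HANDLER_CLASSES:
        if handler_cls.accepts(content_type):
            return handler_cls
    return None

# pick_handler("text/html; charset=utf-8") would return HTMLContentHandler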
CUSTOM_HEADER = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                  "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36"
}
import air_instance
# NOTE: xbar (sample mean), me (margin of error) and alpha (significance
# level) are assumed to have been computed earlier in the original script.
lower_ci = xbar - me
upper_ci = xbar + me
print("We are {}% confident that the true weight of chicken is between {} and {} grams."
      .format((1 - alpha) * 100, lower_ci, upper_ci))
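# For context, a self-contained sketch (not from the original script) of how
# xbar, me and alpha could be produced for a t-based confidence interval; the
# sample weights below are made-up numbers.
import numpy as np
from scipy import stats

weights = np.array([1480.0, 1510.0, 1495.0, 1520.0, 1465.0])  # hypothetical sample
alpha = 0.05
xbar = weights.mean()
sem = stats.sem(weights)                                  # standard error of the mean
t_crit = stats.t.ppf(1 - alpha / 2, df=len(weights) - 1)  # two-sided critical value
me = t_crit * sem                                         # margin of error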
# -*- coding: utf-8 -*-
from odoo import models, fields, api, _
import odoo.addons.decimal_precision as dp
from odoo.tools import float_is_zero, float_compare, DEFAULT_SERVER_DATETIME_FORMAT


class SupportInvoiceLine(models.Model):
    _name = "support.invoice.line"

    product_id = fields.Many2one(
        'product.product',
        string='Product',
        required=True,
    )
    name = fields.Text(
        string='Description'
    )
    price_unit = fields.Float(
        string='Unit Price',
        digits=dp.get_precision('Product Price')
    )
    quantity = fields.Float(
        string='Quantity',
        digits=dp.get_precision('Product Unit of Measure'),
        required=True,
        default=1
    )
    product_uom_qty = fields.Float(
        string='Quantity',
        digits=dp.get_precision('Product Unit of Measure'),
        required=True,
        default=1.0,
    )
    product_uom = fields.Many2one(
        'uom.uom',
        string='Unit of Measure',
    )
    support_id = fields.Many2one(
        'helpdesk.support',
        string='Support Invoice',
    )
    tax_id = fields.Many2many(
        'account.tax',
        string='Taxes',
    )
    analytic_account_id = fields.Many2one(
        'account.analytic.account',
        string='Analytic Account'
    )
    is_invoice = fields.Boolean(
        string='Is Invoice Create',
        track_visibility='onchange',
        default=False,
        copy=False,
    )

    @api.multi
    def _compute_tax_id(self):
        for line in self:
            fpos = line.support_id.partner_id.property_account_position_id
            # Apply the partner's fiscal position mapping to the product's
            # taxes (previously `fpos` was looked up but never used)
            taxes = line.product_id.taxes_id
            line.tax_id = fpos.map_tax(
                taxes, line.product_id, line.support_id.partner_id) if fpos else taxes

    @api.multi
    @api.onchange('product_id')
    def product_id_change(self):
        if not self.product_id:
            return {'domain': {'product_uom': []}}

        vals = {}
        domain = {'product_uom': [('category_id', '=', self.product_id.uom_id.category_id.id)]}
        if not self.product_uom or (self.product_id.uom_id.category_id.id != self.product_uom.category_id.id):
            vals['product_uom'] = self.product_id.uom_id.id

        vals['price_unit'] = self.product_id.lst_price
        vals['name'] = self.product_id.name
        self.update(vals)
        return {'domain': domain}

    @api.onchange('product_uom', 'product_uom_qty')
    def product_uom_change(self):
        if not self.product_uom:
            self.price_unit = 0.0
            return
        if self.support_id.partner_id and self.support_id.partner_id.property_product_pricelist:
            product = self.product_id.with_context(
                lang=self.support_id.partner_id.lang,
                partner=self.support_id.partner_id.id,
                quantity=self.product_uom_qty,
                # `fields.Datetime.now` must be called; otherwise the function
                # object itself is passed instead of a timestamp
                date_order=fields.Datetime.now(),
                pricelist=self.support_id.partner_id.property_product_pricelist.id,
                uom=self.product_uom.id,
                fiscal_position=self.env.context.get('fiscal_position'),
            )
            self.price_unit = self.env['account.tax']._fix_tax_included_price(product.price, product.taxes_id, self.tax_id)

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
""" Data Loader and Feature Dictionary classes for InterpRecSys NOTE: some code borrowed from here https://github.com/princewen/tensorflow_practice/blob/master/recommendation/Basic-DeepFM-model/data_reader.py @Author: Zeyu Li <zyli@cs.ucla.edu> or <zeyuli@g.ucla.edu> """ import pandas as pd from const import Constant, Config DATA_DIR = Constant.PARSE_DIR class DataLoader: def __init__(self , dataset , batch_size): """ :param: dataset: name of dataset :param: use_graph: whether need to build graph :param: batch_size: """ # ==== params ===== self.dataset = dataset self.cfg = Config(dataset=dataset) # ===== sizes ===== self.batch_size = batch_size self.train_size, self.test_size, self.valid_size = 0, 0, 0 # ===== inner variables ===== self.batch_index = 0 self.has_next = False # ===== datasets ===== self.train_ind, self.train_label = self.load_data("train") self.test_ind, self.test_label = self.load_data("test") self.val_ind, self.val_label = self.load_data("val") self.train_size = self.train_label.shape[0] self.test_size = self.test_label.shape[0] self.val_size = self.val_label.shape[0] self.feature_size, self.field_size = self.load_statistics() # ===== iter count ===== self.train_iter_count = self.train_size // self.batch_size def load_data(self, usage): """ usage as one of `train`, `test`, `val` :param usage: :return: usage_ind.np.array usage_label.np.array """ if usage not in ["train", "test", "val"]: raise ValueError terms = ["ind", "label"] ret_sets = [] data_dir = Constant.PARSE_DIR + self.dataset + "/" for trm in terms: ret_sets.append( pd.read_csv( data_dir + "{}_{}.csv".format(usage, trm), header=None).values) return ret_sets def generate_train_batch_ivl(self): bs, bi = self.batch_size, self.batch_index end_ind = min((bi + 1) * bs, self.train_size) b_ind = self.train_ind[bs * bi:end_ind] b_label = self.train_label[bs * bi: end_ind] self.batch_index += 1 if self.batch_index == self.train_iter_count: self.batch_index = 0 self.has_next = False return b_ind, b_label def generate_test_ivl(self): bs = self.batch_size batch_count = self.test_size // bs + 1 for bi in range(batch_count): end_index = min((bi + 1) * bs, self.test_size) batch_ind = self.test_ind[bs * bi: end_index] batch_label = self.test_label[bs * bi: end_index] yield batch_ind, batch_label def generate_val_ivl(self): bs = self.batch_size batch_count = self.val_size // bs + 1 for bi in range(batch_count): end_ind = min((bi + 1) * bs, self.val_size) batch_ind = self.val_ind[bs * bi: end_ind] batch_label = self.val_label[bs * bi: end_ind] yield batch_ind, batch_label def load_statistics(self): with open(Constant.PARSE_DIR + "{}/feat_dict".format(self.dataset), "r") as fin: feat_size, field_size = [int(x) for x in fin.readline().split(" ")] return feat_size, field_size
# Generated by Django 2.2.5 on 2019-10-28 07:01 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('sema', '0034_semaproduct_pies_c10_des'), ] operations = [ migrations.RemoveField( model_name='semaproduct', name='pies_c10_ext', ), ]
import os import pytest import requests_mock from arcus import Client @pytest.fixture def client(): yield Client(sandbox=True) @pytest.fixture def client_proxy(): proxy = os.environ['ARCUS_PROXY'] with requests_mock.mock() as m: m.get( f'{proxy}/account', json=dict( primary=dict( name='cuenca', balance=63869.33, minimum_balance=0.0, currency='MXN', ), topup=dict( name='cuenca-tae', balance=69720.0, minimum_balance=0.0, currency='MXN', ), ), ) m.post( f'{proxy}/account', json=dict(message='Missing Authentication Token'), status_code=403, ) yield Client(sandbox=True, proxy=proxy)
from .AST.sentence import Delete
from .AST.expression import Relational, Logical
from .executeExpression import executeExpression
from storageManager.TypeChecker_Manager import *
from .storageManager.jsonMode import *
from .AST.error import *
import operator
import sys
sys.path.append("../")
from console import print_error, print_success, print_warning

#def delete(database: str, table: str, columns: list) -> int:
    #0 -> Successful operation
    #1 -> Operation error
    #2 -> Database does not exist
    #3 -> Table does not exist
    #4 -> Primary key does not exist
    #anything else -> Operation error

# Comparison operators supported in the WHERE expression. NOTE: values are
# compared as strings, mirroring the original logic; numeric columns would
# need a cast for the ordering comparisons to be meaningful.
_COMPARATORS = {
    '=': operator.eq,
    '!=': operator.ne,
    '<>': operator.ne,
    '>': operator.gt,
    '<': operator.lt,
    '>=': operator.ge,
    '<=': operator.le,
}


def executeDelete(self, Delete_):
    # Delete : {
    #     table: "table_name",
    #     expression: expression
    # }
    # expression : (Relational | Logical)
    # Relational : {
    #     value1: Value,
    #     value2: Value,
    #     type: ("=" | "!=" | "<>" | ">" | "<" | ">=" | "<=")
    # }
    # Logical : {
    #     value1: (Logical | Relational),
    #     value2: (Logical | Relational),
    #     type: ("AND" | "OR")
    # }
    delete_: Delete = Delete_
    table_name = delete_.table
    expression_ = delete_.expression

    relational_ = get_first_relational(self, expression_)
    if relational_ is None:
        print_error("UNKNOWN ERROR", "instruction not executed")
        return

    TypeChecker_Manager_ = get_TypeChecker_Manager()
    if TypeChecker_Manager_ is None:
        print_error("UNKNOWN ERROR", "instruction not executed")
        return

    use_: str = get_use(TypeChecker_Manager_)
    if use_ is None:
        print_warning("RUNTIME ERROR", "Undefined database to use")
        return

    database_ = get_database(use_, TypeChecker_Manager_)
    if database_ is None:
        print_error("SEMANTIC ERROR", "Database to use does not exist")
        return

    table_ = get_table(table_name, database_)
    if table_ is None:
        print_error("SEMANTIC ERROR", "Table does not exist")
        return

    result1 = executeExpression(self, relational_.value1)
    result2 = executeExpression(self, relational_.value2)
    result_type = relational_.type

    column_ = get_column(str(result1.value), table_)
    if column_ is None:
        print_error("SEMANTIC ERROR", str(relational_.value1) +
                    " column does not exist in " + table_.name + " table")
        return

    #----------------------------------------------------------------------
    # Index of the column the WHERE condition compares against
    column_number_to_compare = 0
    for i, col in enumerate(table_.columns):
        if col.name == column_.name:
            column_number_to_compare = i
            break

    compare = _COMPARATORS.get(result_type)
    table_records = extractTable(database_.name, table_.name)
    table_record_to_delete = []
    if compare is not None:
        for record in table_records:
            if compare(str(record[column_number_to_compare]), str(result2.value)):
                table_record_to_delete.append(record)

    # Positions of the primary-key columns
    columns_with_primary_keys = [i for i, col in enumerate(table_.columns)
                                 if col.primary_ == True]

    # Keep only the primary-key values of each record to delete
    table_record_to_delete_only_with_primary_keys = [
        [record[j] for j in columns_with_primary_keys]
        for record in table_record_to_delete
    ]

    number_of_rows_removed = 0
    for primary_keys in table_record_to_delete_only_with_primary_keys:
        try:
            # delete() comes from the storage manager (jsonMode); see the
            # return codes documented at the top of this file
            result_delete = delete(database_.name, table_.name, primary_keys)
            if result_delete == 0:
                # row deleted successfully
                number_of_rows_removed += 1
            # 1 -> operation error, 2 -> database does not exist,
            # 3 -> table does not exist, 4 -> primary key does not exist;
            # all of these are skipped silently, as in the original logic
        except Exception:
            # instruction not executed for this row; skip it
            pass

    print_success("QUERY", str(number_of_rows_removed) + " rows removed successfully")
    #----------------------------------------------------------------------


def get_first_relational(self, expression_) -> Relational:
    # Walk down the left (value1) branch until the first Relational node
    current_expression = expression_
    while isinstance(current_expression, Logical):
        current_expression = current_expression.value1
    return current_expression
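# Illustrative only and never called at import time; it assumes the
# Relational/Logical constructors take (value1, value2, type) in that order,
# mirroring the comment block inside executeDelete above.
def _demo_get_first_relational():
    rel_a = Relational("a", 1, "=")   # hypothetical constructor signature
    rel_b = Relational("b", 2, "=")
    tree = Logical(rel_a, rel_b, "AND")
    # For (a = 1) AND (b = 2), the left-most Relational node is returned:
    assert get_first_relational(None, tree) is rel_a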
import time

from siemens.pac import PACx200

times = []  # kept from the original snippet; unused below

p = PACx200('192.168.0.80')

while True:
    try:
        p.read()
        print(p.as_dict(replace_nan=True))
    except Exception:
        # stop polling on the first read/communication error instead of
        # swallowing arbitrary exceptions with a bare `except`
        break
    time.sleep(1)
# from django.contrib import admin # from wagtail.contrib.modeladmin.options import ModelAdmin, modeladmin_register # from .models import JoinData # class JoinDataAdmin(ModelAdmin): # model = JoinData # menu_label = '報名資料' # ditch this to use verbose_name_plural from model # menu_icon = 'form' # change as required # list_display = ('ebirdid', 'email','password','team','register_time','is_valid') # list_filter = ('team', 'register_time') # search_fields = ('team', 'register_time',) # modeladmin_register(JoinDataAdmin) # class JoinDataDAdmin(admin.ModelAdmin): # list_display = ('ebirdid', 'email','password','team','register_time','is_valid') # list_filter = ('team', 'register_time') # admin.site.register(JoinData, JoinDataDAdmin)
import matplotlib.pyplot as plt import pandas as pd import numpy as np import re plt.rcParams['pdf.fonttype'] = 42 from utility import * DATASET_LIST = ['wikivot', 'referendum', 'slashdot', 'wikicon'] + ['p2pgnutella31', 'youtube', 'roadnetCA', 'fb-artist'] Density = {'p2pgnutella31':2.3630204838142714, 'youtube':2.632522975795011, 'roadnetCA':1.4077949080147323, 'fb-artist':16.21906364446204} # edge density, required by computation of modularity score PCA_LIST = ['Type1', 'Type2', 'Type3', 'Type4'] def plot_q_Task(fname, qs, MODE='adj', ERR=False): df = pd.read_csv('{}.csv'.format(fname)) fig, axs = plt.subplots(nrows=1, ncols=2, figsize=(6,3), constrained_layout=True) x1, x2 = df[df["algo"]=='RSVD'], df[df["algo"]=='RSum'] if MODE=='adj': c, ctitle, otitle, plot_baseline = "R", "$R(\hat{u})$", "polarity", True else: c, ctitle, otitle, plot_baseline = "eigval", "$\hat{u}^TA\hat{u}$", "modularity", False for j,dname in enumerate(DATASET_LIST): tx1, tx2 = x1[x1["dataset"]==dname], x2[x2["dataset"]==dname] if len(tx1)==0: continue rs1, rs2, re1, re2, os1, os2 = [],[],[],[],[],[] for q in qs: rs1 += [tx1[tx1["q"]==q][c].mean()] rs2 += [tx2[tx2["q"]==q][c].mean()] re1 += [tx1[tx1["q"]==q][c].std()] re2 += [tx2[tx2["q"]==q][c].std()] o1, o2 = tx1[tx1["q"]==q]["obj"].mean(), tx2[tx2["q"]==q]["obj"].mean() if MODE=='mod': o1, o2 = o1/(4*Density[dname]), o2/(4*Density[dname]) os1 += [o1] os2 += [o2] if ERR: axs[0].errorbar(qs, rs1, yerr=re1, label=dname, ls='-', color='C{}'.format(j)) axs[0].errorbar(qs, rs2, yerr=re2, ls='-.', color='C{}'.format(j)) else: axs[0].plot(qs, rs1, label=dname, ls='-', color='C{}'.format(j)) axs[0].plot(qs, rs2, ls='-.', color='C{}'.format(j)) axs[1].plot(qs, os1, label=dname, ls='-', color='C{}'.format(j)) axs[1].plot(qs, os2, ls='-.', color='C{}'.format(j)) # baseline scipy if plot_baseline: tx0 = df[(df["algo"]=='eigsh')&(df["dataset"]==dname)] o0 = tx0["obj"].mean() axs[1].hlines(o0, 1, qs[-1], label='Lanczos', ls='dotted', color='C{}'.format(j), linewidth=1) axs[0].set_title(ctitle) axs[0].set_xlabel("$q$") axs[1].set_title(otitle) axs[1].set_xlabel("$q$") plt.savefig('{}.pdf'.format(fname), bbox_inches='tight') def plot_d_Task(fname, ds, MODE='adj', ERR=False): df = pd.read_csv('{}.csv'.format(fname)) fig, axs = plt.subplots(nrows=1, ncols=2, figsize=(6,3), constrained_layout=True) x1, x2 = df[df["algo"]=='RSVD'], df[df["algo"]=='RSum'] if MODE=='adj': c, ctitle, otitle, plot_baseline = "R", "$R(\hat{u})$", "polarity", True else: c, ctitle, otitle, plot_baseline = "eigval", "$\hat{u}^TA\hat{u}$", "modularity", False for j,dname in enumerate(DATASET_LIST): tx1, tx2 = x1[x1["dataset"]==dname], x2[x2["dataset"]==dname] if len(tx1)==0: continue rs1, rs2, re1, re2, os1, os2 = [],[],[],[],[],[] for d in ds: rs1 += [tx1[tx1["d"]==d][c].mean()] rs2 += [tx2[tx2["d"]==d][c].mean()] re1 += [tx1[tx1["d"]==d][c].std()] re2 += [tx2[tx2["d"]==d][c].std()] o1, o2 = tx1[tx1["d"]==d]["obj"].mean(), tx2[tx2["d"]==d]["obj"].mean() if MODE=='mod': o1, o2 = o1/(4*Density[dname]), o2/(4*Density[dname]) os1 += [o1] os2 += [o2] if ERR: axs[0].errorbar(ds, rs1, yerr=re1, label=dname, ls='-', color='C{}'.format(j)) axs[0].errorbar(ds, rs2, yerr=re2, ls='-.', color='C{}'.format(j)) else: axs[0].plot(ds, rs1, label=dname, ls='-', color='C{}'.format(j)) axs[0].plot(ds, rs2, ls='-.', color='C{}'.format(j)) axs[1].plot(ds, os1, label=dname, ls='-', color='C{}'.format(j)) axs[1].plot(ds, os2, ls='-.', color='C{}'.format(j)) # baseline scipy if plot_baseline: tx0 = 
df[(df["algo"]=='eigsh')&(df["dataset"]==dname)] o0 = tx0["obj"].mean() axs[1].hlines(o0, 1, ds[-1], label='Lanczos', ls='dotted', color='C{}'.format(j), linewidth=1) axs[0].set_title(ctitle) axs[0].set_xlabel("$d$") axs[1].set_title(otitle) axs[1].set_xlabel("$d$") plt.savefig('{}.pdf'.format(fname), bbox_inches='tight') def plot_d_q_Task(fdname, fqname, oname, ds, qs, MODE='adj'): df1, df2 = pd.read_csv('{}.csv'.format(fdname)), pd.read_csv('{}.csv'.format(fqname)) fig, axs = plt.subplots(nrows=1, ncols=2, figsize=(6,2.5), constrained_layout=True) x11, x12, x21, x22 = df1[df1["algo"]=='RSVD'], df1[df1["algo"]=='RSum'], df2[df2["algo"]=='RSVD'], df2[df2["algo"]=='RSum'] if MODE=='adj': c, otitle = "obj", "polarity" elif MODE=='mod': c, otitle = "obj", "modularity" else: c, otitle = "eigval", "$R(\hat{u})$" # plot d for j,dname in enumerate(DATASET_LIST): tx1, tx2 = x11[x11["dataset"]==dname], x12[x12["dataset"]==dname] if len(tx1)==0: continue rs1, rs2 = [],[] for d in ds: r1, r2 = tx1[tx1["d"]==d][c].mean(), tx2[tx2["d"]==d][c].mean() if MODE=='mod': r1, r2 = r1/(4*Density[dname]), r2/(4*Density[dname]) rs1 += [r1] rs2 += [r2] axs[0].plot(ds, rs1, label=dname, ls='-', color='C{}'.format(j)) axs[0].plot(ds, rs2, ls='-.', color='C{}'.format(j)) # plot q for j,dname in enumerate(DATASET_LIST): tx1, tx2 = x21[x21["dataset"]==dname], x22[x22["dataset"]==dname] if len(tx1)==0: continue rs1, rs2 = [],[] for q in qs: r1, r2 = tx1[tx1["q"]==q][c].mean(), tx2[tx2["q"]==q][c].mean() if MODE=='mod': r1, r2 = r1/(4*Density[dname]), r2/(4*Density[dname]) rs1 += [r1] rs2 += [r2] axs[1].plot(qs, rs1, label=dname, ls='-', color='C{}'.format(j)) axs[1].plot(qs, rs2, ls='-.', color='C{}'.format(j)) axs[0].set_ylabel(otitle) axs[0].set_xlabel("$d$") axs[1].set_ylabel(otitle) axs[1].set_xlabel("$q$") plt.savefig('{}.pdf'.format(oname), bbox_inches='tight') def plot_q_PCA(fname, qs, ERR=False): df = pd.read_csv('{}.csv'.format(fname)) fig, axs = plt.subplots(nrows=1, ncols=2, figsize=(6,2.5), constrained_layout=True) x1, x2 = df[df["algo"]=='RSVD'], df[df["algo"]=='RSum'] for j,dname in enumerate(PCA_LIST): tx1, tx2 = x1[x1["dataset"]==dname], x2[x2["dataset"]==dname] if len(tx1)==0: continue tx0 = df[(df["algo"]=='eigsh')&(df["dataset"]==dname)] t0 = tx0["time"].mean() rs1, rs2, re1, re2, ts1, ts2 = [],[],[],[],[],[] for q in qs: rs1 += [tx1[tx1["q"]==q]["R"].mean()] rs2 += [tx2[tx2["q"]==q]["R"].mean()] re1 += [tx1[tx1["q"]==q]["R"].std()] re2 += [tx2[tx2["q"]==q]["R"].std()] ts1 += [t0/tx1[tx1["q"]==q]["time"].mean()] ts2 += [t0/tx2[tx2["q"]==q]["time"].mean()] if ERR: axs[0].errorbar(qs, rs1, yerr=re1, label=dname, ls='-', color='C{}'.format(j)) #axs[0].errorbar(qs, rs2, yerr=re2, ls='-.', color='C{}'.format(j)) else: axs[0].plot(qs, rs1, label=dname, ls='-', color='C{}'.format(j)) #axs[0].plot(qs, rs2, ls='-.', color='C{}'.format(j)) axs[1].plot(qs, ts1, ls='-', color='C{}'.format(j)) #axs[1].plot(qs, ts2, ls='-.', color='C{}'.format(j)) axs[0].set_ylabel("$R(\hat{u})$") axs[0].set_xlabel("$q$") axs[1].set_ylabel("Speedup ratio") axs[1].set_xlabel("$q$") axs[1].hlines(1, 1, qs[-1], ls='dotted', color='C9', linewidth=1) plt.savefig('{}.pdf'.format(fname), bbox_inches='tight') def plot_d_PCA(fname, ds, ERR=False): df = pd.read_csv('{}.csv'.format(fname)) fig, axs = plt.subplots(nrows=1, ncols=2, figsize=(6,2.5), constrained_layout=True) x1, x2 = df[df["algo"]=='RSVD'], df[df["algo"]=='RSum'] for j,dname in enumerate(PCA_LIST): tx1, tx2 = x1[x1["dataset"]==dname], x2[x2["dataset"]==dname] if len(tx1)==0: 
continue
        tx0 = df[(df["algo"]=='eigsh')&(df["dataset"]==dname)]
        t0 = tx0["time"].mean()
        rs1, rs2, re1, re2, ts1, ts2 = [],[],[],[],[],[]
        for d in ds:
            rs1 += [tx1[tx1["d"]==d]["R"].mean()]
            rs2 += [tx2[tx2["d"]==d]["R"].mean()]
            re1 += [tx1[tx1["d"]==d]["R"].std()]
            re2 += [tx2[tx2["d"]==d]["R"].std()]
            ts1 += [t0/tx1[tx1["d"]==d]["time"].mean()]
            ts2 += [t0/tx2[tx2["d"]==d]["time"].mean()]
        if ERR:
            axs[0].errorbar(ds, rs1, yerr=re1, label=dname, ls='-', color='C{}'.format(j))
            #axs[0].errorbar(ds, rs2, yerr=re2, ls='-.', color='C{}'.format(j))
        else:
            axs[0].plot(ds, rs1, label=dname, ls='-', color='C{}'.format(j))
            #axs[0].plot(ds, rs2, ls='-.', color='C{}'.format(j))
        axs[1].plot(ds, ts1, ls='-', color='C{}'.format(j))
        #axs[1].plot(ds, ts2, ls='-.', color='C{}'.format(j))
    # raw strings avoid invalid escape sequences like "\h" in the TeX labels
    axs[0].set_ylabel(r"$R(\hat{u})$")
    axs[0].set_xlabel("$d$")
    axs[1].set_ylabel("Speedup ratio")
    axs[1].set_xlabel("$d$")
    axs[1].hlines(1, 1, ds[-1], ls='dotted', color='C9', linewidth=1)
    plt.savefig('{}.pdf'.format(fname), bbox_inches='tight')

def plot_SyntheticEigvals(N):
    fig = plt.figure(figsize=(3,3))
    for j,type in enumerate(PCA_LIST):
        Sigma = get_eigvals(type, N=N)
        plt.plot(np.sort(Sigma)[::-1], label=type, color='C{}'.format(j))
        print("kappa={:.4f}".format(np.sum(np.array(Sigma)**3) / np.sum(np.abs(np.array(Sigma)**3))))
    plt.xlabel('$i$')
    # `linthresh` is the Matplotlib >= 3.3 keyword (formerly `linthreshy`)
    plt.yscale('symlog', linthresh=0.01)
    plt.ylabel(r'$\lambda_i$')
    #plt.legend(loc='center left', bbox_to_anchor=(-0.75, -0.5), fancybox=True, shadow=True, ncol=4)
    plt.savefig('synthetic-eigvals_n{}.pdf'.format(N), bbox_inches='tight')

plot_d_q_Task("SCG-d_q1-R", "SCG-q_d10-R", "SCG-real_dq", [1,5,10,25,50], [1,2,4,8,16], MODE='adj')
plot_d_q_Task("MOD-d_q1-S", "MOD-q_d10-S", "MOD-real_dq", [1,5,10,25,50], [1,2,4,8,16], MODE='mod')
plot_SyntheticEigvals(10000)
plot_d_PCA("SYN_d_q1_n10000", [1,5,10,25])
plot_q_PCA("SYN_q_d10_n10000", [1,2,4,8])
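# Side note (not in the original script): the "kappa" printed by
# plot_SyntheticEigvals is sum(lambda_i^3) / sum(|lambda_i|^3), a value in
# [-1, 1] that measures how sign-skewed the spectrum is. A tiny worked example:
_lam = np.array([3.0, 1.0, -1.0])
_kappa = np.sum(_lam**3) / np.sum(np.abs(_lam)**3)  # (27+1-1)/(27+1+1) = 27/29 ~ 0.9310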
# -*- coding:utf-8 -*-
# -*- created by: yongzhuo -*-
import requests
from lxml import etree
import pickle
import time
import datetime


'''NOTE: you have to supply your own Cookie'''
Cookie = '******NOTE: you have to supply your own Cookie'


def txtRead(filePath, encodeType='utf-8'):
    '''Read a txt file into a list of lines'''
    listLine = []
    try:
        file = open(filePath, 'r', encoding=encodeType)
        while True:
            line = file.readline()
            if not line:
                break
            listLine.append(line)
        file.close()
    except Exception as e:
        print(str(e))
    finally:
        return listLine


def txtWrite(listLine, filePath, type='w', encodeType='utf-8'):
    '''Write a list of lines to a txt file'''
    try:
        file = open(filePath, type, encoding=encodeType)
        file.writelines(listLine)
        file.close()
    except Exception as e:
        print(str(e))


def is_valid_date(strdate):
    '''Check whether a string is a valid date'''
    try:
        if ":" in strdate:
            time.strptime(strdate, "%Y-%m-%d %H:%M:%S")
        else:
            time.strptime(strdate, "%Y-%m-%d")
        return True
    except:
        return False


# per city: Kunming
# public questions
def process_city_2(addr):
    headers = {
        "Host": 'xxcx.yn.gov.cn',
        "User-Agent": 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36',
        "Accept": 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        "Accept-Language": 'zh-CN,zh;q=0.9',
        "Accept-Encoding": 'gzip, deflate',
        "Connection": 'keep-alive',
        "Upgrade-Insecure-Requests": "1",
        "Cache-Control": "max-age=0",
        "Cookie": Cookie
    }
    res = requests.get(addr, headers=headers)
    # print(res.content)
    html_content = res.content.decode('gb2312', 'ignore')
    html = etree.HTML(html_content)
    # html.xpath('//td[@class="title"]//text()')
    # tableCont = html.xpath('//tr[@class="tableCont"]//text()')
    tableCont_herf = html.xpath('//tr[@height="33px"]/td[@width="50%"]//a//@href')
    tableCont = html.xpath('//tr[@height="33px"]/td//text()')
    t_all = []
    t_list_one = []
    for tableCont_0 in tableCont:
        tableCont_0_replace = tableCont_0.replace('\r', '').replace('\n', '').replace('\t', '').replace(' ', '')
        if tableCont_0_replace == '':
            continue
        if t_all:
            len_p = len(t_all)
        else:
            len_p = 0
        t_list_one.append(tableCont_0)
        if is_valid_date(tableCont_0):
            if len(t_list_one) == 3:
                t_list_one.append(tableCont_0)
                t_list_one.append(tableCont_herf[len_p])
                t_all.append('momomomo'.join(t_list_one) + '\n')
            if len(t_list_one) == 4:
                t_list_one.append(tableCont_herf[len_p])
                t_all.append('momomomo'.join(t_list_one) + '\n')
            t_list_one = []
    return t_all


def process_qa_city_2(addr=None):
    headers = {
        "Host": 'xxcx.yn.gov.cn',
        "User-Agent": 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36',
        "Accept": 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        "Accept-Language": 'zh-CN,zh;q=0.9',
        "Accept-Encoding": 'gzip, deflate',
        "Connection": 'keep-alive',
        "Upgrade-Insecure-Requests": "1",
        "Cache-Control": "max-age=0",
        "Cookie": Cookie
    }
    res = requests.get(addr, headers=headers)
    # print(res.content)
    html_content = res.content.decode('gb2312', 'ignore')
    html = etree.HTML(html_content)
    # html.xpath('//td[@class="title"]//text()')
    # tableCont = html.xpath('//tr[@class="tableCont"]//text()')
    question, answer = "", ""  # defaults in case the xpath lookup below fails
    try:
        qas = html.xpath('//tbody//tr//td[@colspan="3"]//text()')
        question = qas[0]
        answer = qas[1]
        # answer = html.xpath('//div[@class="adminRep "]//text()')
    except Exception as e:
        print('addr: ' + addr)
    if not answer:
        answer = "noanswer"
    return ''.join(question).replace('\r\n', '').replace('\n', '').replace(' ', '').replace('\t', ''), ''.join(
        answer).replace('\r\n', '').replace('\n', '').replace(' ', '').replace('\t', '')


def operation_process_qa_city_2_1():
    # urls = txtRead('load/昆明市公众问题.txt')
    urls = txtRead('load/昆明市常见问题.txt')
    qat_list = []
    for url in urls:
        url_a = url
        url_list = url_a.strip().split('momomomo')
        qa_url = 'http://xxcx.yn.gov.cn/faq/' + url_list[3]
        question, answer = process_qa_city_2(qa_url)
        url_list.append(question)
        url_list.append(answer)
        qat_one = 'momomomo'.join(url_list)
        qat_list.append(qat_one + '\n ')
        print(len(qat_list))
        # checkpoint every 250 entries
        if len(qat_list) % 250 == 0:
            txtWrite(qat_list, 'load/qa_昆明市常见问题.txt')
    output = open('load/qa.pickle', 'wb')
    pickle.dump(qat_list, output)
    txtWrite(qat_list, 'load/qa_昆明市常见问题.txt')


# frequently asked questions
def process_city_2_1(addr):
    headers = {
        "Host": 'xxcx.yn.gov.cn',
        "User-Agent": 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36',
        "Accept": 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        "Accept-Language": 'zh-CN,zh;q=0.9',
        "Accept-Encoding": 'gzip, deflate',
        "Connection": 'keep-alive',
        "Upgrade-Insecure-Requests": "1",
        "Cache-Control": "max-age=0",
        "Cookie": Cookie
    }
    res = requests.get(addr, headers=headers)
    # print(res.content)
    html_content = res.content.decode('gb2312', 'ignore')
    html = etree.HTML(html_content)
    # html.xpath('//td[@class="title"]//text()')
    # tableCont = html.xpath('//tr[@class="tableCont"]//text()')
    tableCont_herf = html.xpath('//tr[@height="33px"]/td[@width="50%"]//a//@href')
    tableCont = html.xpath('//tr[@height="33px"]/td//text()')
    t_all = []
    t_list_one = []
    for tableCont_0 in tableCont:
        tableCont_0_replace = tableCont_0.replace('\r', '').replace('\n', '').replace('\t', '').replace(' ', '')
        if tableCont_0_replace == '':
            continue
        if t_all:
            len_p = len(t_all)
        else:
            len_p = 0
        t_list_one.append(tableCont_0)
        if not t_list_one or len(t_list_one) == 3:
            t_list_one.append(tableCont_herf[len_p])
            t_all.append('momomomo'.join(t_list_one) + '\n')
            t_list_one = []
    return t_all


# store the data; one file per table
def operation_process_city_2():
    res_list = []
    # for num_count in range(680):
    # num_count = 2095
    # for i in range(13653-2095):
    # FAQ page checkpoints: 27-322-1200-1923-2790-4534-4796-5342
    # public-question page checkpoints: 80-699-824-1042-1528-2302-2506-2833-3430-4397-4553-4927-5210-6629-7257-7446-7924-8557-9503-9939-10665-10815-10935-11116-11969-12185-13057
    num_count = 0
    for i in range(13057):
        num_count = num_count + 1
        # FAQ
        # addr = 'http://xxcx.yn.gov.cn/faq/areagg_gzwt.jsp?page1=' + str(num_count) + '&partment=null&title=null&person=null&type=null&startdate=null&enddate=null&zhg=null&xzqid=8981'
        # addr = 'http://xxcx.yn.gov.cn/faq/areagg_gzwt.jsp?page1=' + str(num_count) +'&partment=null&title=null&person=null&type=null&startdate=null&enddate=null&zhg=null&xzqid=1'
        # public questions
        # addr = 'http://xxcx.yn.gov.cn/faq/areagg_cjwt.jsp?enty=null&page2=' + str(num_count) +'&partment=null&title=null&person=null&type=null&startdate=null&enddate=null&zhg=null&xzqid=8981'
        addr = 'http://xxcx.yn.gov.cn/faq/areagg_cjwt.jsp?enty=null&page2=' + str(num_count) + '&partment=null&title=null&person=null&type=null&startdate=null&enddate=null&zhg=null&xzqid=1'
        t_all = process_city_2_1(addr)
        print(num_count)
        # print(t_all)
        res_list = res_list + t_all
        # txtWrite(res_list, 'load/昆明市公众问题.txt')
        # txtWrite(res_list, 'load/常见提问_95565.txt', type='a+')
        txtWrite(res_list, '分部门_公众提问_13653.txt', type='a+')
        # time.sleep(1000)
        res_list = []
        # if num_count % 1000 == 0:
        #     time.sleep(60000)
        qat_list = []
        for url in t_all:
            url_a = url
            url_list = url_a.strip().split('momomomo')
            qa_url = 'http://xxcx.yn.gov.cn/faq/' + url_list[3]
            question, answer = process_qa_city_2(qa_url)
            url_list.append(question)
            url_list.append(answer)
            qat_one = 'momomomo'.join(url_list)
            qat_list.append(qat_one + '\n ')
            print(str(len(qat_list)) + ' question: ' + question)
        txtWrite(qat_list, 'q_a_昆明市公众问题_95565_20190109.txt', type='a+')
        # print('sleep')
        print(datetime.datetime.now())
        # time.sleep(6)


print('''NOTE: you have to supply your own Cookie''')
operation_process_city_2()
print('''NOTE: you have to supply your own Cookie, otherwise this fails with UnicodeEncodeError: 'latin-1' codec can't encode characters in position 6-10: ordinal not in range(256)''')
# -*- coding: utf-8 -*-
from urllib import request

proxy_handler = request.ProxyHandler({'http': '10.144.1.10:8080'})
# proxy_auth_handler = request.ProxyBasicAuthHandler()
# proxy_auth_handler.add_password('realm', 'host', 'username', 'password')
opener = request.build_opener(proxy_handler)
with opener.open('http://www.pythonchallenge.com/') as f:
    print('Status:', f.status, f.reason)

proxies = {'http': 'http://10.144.1.10:8080/'}
opener = request.FancyURLopener(proxies)  # override the current environment settings with the given proxy
with opener.open("http://www.pythonchallenge.com/") as f:
    print('Status:', f.status, f.reason)

with request.urlopen('https://www.bing.com') as f:
    data = f.read().decode('utf-8')  # the object returned by urlopen supports close, read, readline, readlines and iteration
    print('Status:', f.status, f.reason)
    for k, v in f.getheaders():
        print('%s: %s' % (k, v))
    # print('Data:', data)

# ### The standard-library urllib module
# - URL handling modules
# - https://docs.python.org/3/library/urllib.html
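# Completing the commented-out lines above: a sketch of authenticated proxy
# access with ProxyBasicAuthHandler (realm, host and credentials are
# placeholders to be replaced with real values).
auth_proxy_handler = request.ProxyHandler({'http': '10.144.1.10:8080'})
proxy_auth_handler = request.ProxyBasicAuthHandler()
proxy_auth_handler.add_password('realm', '10.144.1.10', 'username', 'password')
auth_opener = request.build_opener(auth_proxy_handler, proxy_auth_handler)
# with auth_opener.open('http://www.pythonchallenge.com/') as f:
#     print('Status:', f.status, f.reason)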
# Enlil # # Copyright © 2021 Pedro Pereira, Rafael Arrais # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import unittest from unittest import mock from pipeline.loader.entities.robot import Robot from pipeline.loader.entities.package import Package class TestEntityPackage(unittest.TestCase): __robotic_area = { 'id': 'dummy_area', 'robots': ['dummy_ros1_robot', 'dummy_ros2_robot'] } __robot_ros1_data = { 'id': 'dummy_ros1_robot', 'ros': 'melodic:11311', 'images': ['dummy_image'] } __robot_ros2_data = { 'id': 'dummy_ros2_robot', 'ros': 'foxy:42', 'images': ['dummy_image'] } __robot_ros1 = Robot(__robot_ros1_data, __robotic_area) __robot_ros2 = Robot(__robot_ros2_data, __robotic_area) @mock.patch.object(Package, '_Package__parse_yaml_data') def test_parsing_package(self, mock): """ Test if the same provided data is the one being parsed. """ yaml_data = {'dummy': 'dummy'} package = Package(yaml_data, self.__robot_ros1) mock.assert_called_once_with(yaml_data, self.__robot_ros1) self.assertEqual(package.yaml_data, yaml_data) def test_loading_package_without_id(self): """ Test if execution is terminated if provided data has no required field "id". """ yaml_data = {'dummy': 'dummy'} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros1) self.assertEqual(exception.exception.code, 1) def test_loading_package_invalid_id(self): """ Test if execution is terminated if provided data has an empty "id" field. """ yaml_data = {'id': ''} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros1) self.assertEqual(exception.exception.code, 1) def test_loading_package_without_path(self): """ Test if execution is terminated if provided data has no required field "path". """ yaml_data = {'id': 'dummy_package', 'command': 'dummy_command'} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros1) self.assertEqual(exception.exception.code, 1) def test_loading_package_invalid_path(self): """ Test if execution is terminated if provided data has an empty "path" field. """ yaml_data = {'id': 'dummy_package', 'path': '', 'command': 'dummy_command'} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros1) self.assertEqual(exception.exception.code, 1) def test_loading_package_without_command(self): """ Test if execution is terminated if provided data has no required field "command". 
""" yaml_data = {'id': 'dummy_package', 'path': 'dummy_path'} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros1) self.assertEqual(exception.exception.code, 1) def test_loading_package_invalid_command(self): """ Test if execution is terminated if provided data has an empty "path" field. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': ''} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros1) self.assertEqual(exception.exception.code, 1) def test_loading_package_id(self): """ Test if "id" is set properly. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package_id = yaml_data['id'] package = Package(yaml_data, self.__robot_ros1) self.assertEqual(package.id, f"{self.__robot_ros1.id}-{package_id}") def test_loading_package_no_content(self): """ Test if execution is terminated if any of the field "apt", "git" and "rosinstall" are not declared. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command'} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros1) self.assertEqual(exception.exception.code, 1) def test_loading_package_empty_git(self): """ Test if execution is terminated if field "git" is declared but not set. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': []} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros1) self.assertEqual(exception.exception.code, 1) def test_loading_package_no_git(self): """ Test if no "git clone" command is added when "git" field is not declared. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'apt': ['dummt_apt']} package = Package(yaml_data, self.__robot_ros1) self.assertTrue('git_cmds' not in package.yaml_data) def test_loading_package_git_default_branch(self): """ Test if "git clone" command are properly added when no branch is specified. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_git']} package = Package(yaml_data, self.__robot_ros1) self.assertEqual(len(package.yaml_data['git_cmds']), 1) self.assertEqual( package.yaml_data['git_cmds'][0], f"git -C /ros_workspace/src clone -b {self.__robot_ros1_data['ros'].split(':')[0]} {yaml_data['git'][0]}" ) def test_loading_package_git_branch(self): """ Test if "git clone" command are properly added. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_git:branch']} git_repo, git_branch = yaml_data['git'][0].split(':') package = Package(yaml_data, self.__robot_ros1) self.assertEqual(len(package.yaml_data['git_cmds']), 1) self.assertEqual( package.yaml_data['git_cmds'][0], f"git -C /ros_workspace/src clone -b {git_branch} {git_repo}" ) def test_loading_package_empty_apt(self): """ Test if execution is terminated if field "apt" is declared but not set. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'apt': []} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros1) self.assertEqual(exception.exception.code, 1) def test_loading_package_empty_rosinstall(self): """ Test if execution is terminated if field "rosinstall" is declared but not set. 
""" yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'rosinstall': []} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros1) self.assertEqual(exception.exception.code, 1) def test_loading_package_ros1_environment_variables(self): """ Test if default environment variables are set properly for ROS1 packages. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package = Package(yaml_data, self.__robot_ros1) self.assertEqual(len(package.yaml_data['environment']), 2) self.assertTrue(f"ROS_HOSTNAME={yaml_data['id']}" in package.yaml_data['environment']) self.assertTrue('ROS_MASTER_URI=http://roscore-{{ROBOT_ID}}:{{ROBOT_ROS_PORT}}' in package.yaml_data['environment']) def test_loading_package_ros2_environment_variables(self): """ Test if default environment variables are set properly for ROS2 packages. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package = Package(yaml_data, self.__robot_ros2) self.assertEqual(len(package.yaml_data['environment']), 1) self.assertTrue('ROS_DOMAIN_ID={{ROBOT_ROS_DOMAIN}}' in package.yaml_data['environment']) def test_loading_package_ros(self): """ Test if field "ros" is set properly. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package = Package(yaml_data, self.__robot_ros1) self.assertEqual(package.yaml_data['ros'], '{{ROBOT_ROS_DISTRO}}') def test_loading_package_ros1_networks(self): """ Test if default networks are properly set for ROS1 packages. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package = Package(yaml_data, self.__robot_ros1) self.assertEqual(len(package.yaml_data['networks']), 1) self.assertTrue(f"{self.__robotic_area['id']}-network" in package.yaml_data['networks']) def test_loading_package_ros2_networks(self): """ Test if default networks are properly set for ROS2 packages. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package = Package(yaml_data, self.__robot_ros2) self.assertEqual(len(package.yaml_data['networks']), 1) self.assertTrue(f"{self.__robotic_area['id']}-network" in package.yaml_data['networks']) def test_loading_package_ros1_depends_on(self): """ Test if field "depends_on" is properly set for ROS1 packages. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package = Package(yaml_data, self.__robot_ros1) self.assertEqual(len(package.yaml_data['depends_on']), 1) self.assertTrue(f"roscore-{self.__robot_ros1.yaml_data['id']}" in package.yaml_data['depends_on']) def test_loading_package_ros2_depends_on(self): """ Test if field "depends_on" is not set for ROS2 packages. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package = Package(yaml_data, self.__robot_ros2) self.assertEqual(len(package.yaml_data['depends_on']), 0) def test_loading_package_ros1_restart_default(self): """ Test if field "restart" is properly set to default for ROS1 packages. 
""" yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package = Package(yaml_data, self.__robot_ros1) self.assertEqual(package.yaml_data['restart'], 'always') def test_loading_package_ros1_restart(self): """ Test if field "restart" is properly set when specified for ROS1 packages. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package = Package(yaml_data, self.__robot_ros1) self.assertEqual(package.yaml_data['restart'], yaml_data['restart']) def test_loading_package_ros2_restart_default(self): """ Test if field "restart" is properly set to default for ROS2 packages. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package = Package(yaml_data, self.__robot_ros2) self.assertEqual(package.yaml_data['restart'], 'always') def test_loading_package_ros2_restart(self): """ Test if field "restart" is properly set when specified for ROS1 packages. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo']} package = Package(yaml_data, self.__robot_ros2) self.assertEqual(package.yaml_data['restart'], yaml_data['restart']) def test_ssh_empty(self): """ Test if execution finishes if field "ssh" is declared empty """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo'], 'ssh': []} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros2) self.assertEqual(exception.exception.code, 1) def test_ssh_not_list(self): """ Test if execution finishes if field "ssh" is not a list. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo'], 'ssh': 'random_path'} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros2) self.assertEqual(exception.exception.code, 1) def test_ssh_not_list_of_files(self): """ Test if execution finishes if field "ssh" is not a list of files. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo'], 'ssh': [[], 'dummy']} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros2) self.assertEqual(exception.exception.code, 1) def test_ssh_value_set(self): """ Test if field "ssh" is properly set when defined. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo'], 'ssh': ['random_path']} package = Package(yaml_data, self.__robot_ros2) self.assertEqual(package.yaml_data['ssh'], yaml_data['ssh']) def test_files_empty(self): """ Test if execution finishes if field "files" is declared empty """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo'], 'files': []} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros2) self.assertEqual(exception.exception.code, 1) def test_files_not_list(self): """ Test if execution finishes if field "files" is not a list. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo'], 'files': 'random_path'} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros2) self.assertEqual(exception.exception.code, 1) def test_files_not_list_of_files(self): """ Test if execution finishes if field "files" is not a list of files. 
""" yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo'], 'files': [[], 'dummy']} with self.assertRaises(SystemExit) as exception: Package(yaml_data, self.__robot_ros2) self.assertEqual(exception.exception.code, 1) def test_files_value_set(self): """ Test if field "files" is properly set when defined. """ yaml_data = {'id': 'dummy_package', 'path': 'dummy_path', 'command': 'dummy_command', 'git': ['dummy_repo'], 'files': ['random_path']} package = Package(yaml_data, self.__robot_ros2) self.assertEqual(package.yaml_data['files'], yaml_data['files']) if __name__ == '__main__': unittest.main()
import logging

import ray
import pandas as pd
import click

from neslab.find import Model

logger = logging.getLogger("model")


@ray.remote
def job(scale, t_chr, n_nodes, tag):
    m = Model(scale, "Geometric", t_chr, n_nodes, n_jobs=1)
    lat = m.disco_latency()
    log_entry = {"t_chr": t_chr, "n_nodes": n_nodes, "disco_latency": lat, "tag": tag}
    return log_entry


@click.command()
@click.option("--redis-password", "-p", type=str, default="pass")
@click.option("--head-address", "-a", type=str, default="auto")
@click.option(
    "--infile",
    "-i",
    type=click.Path(exists=True),
    help="File with fitted scale parameters",
    default="results_scale.csv",
)
@click.option(
    "--outfile",
    "-o",
    type=click.Path(dir_okay=False),
    help="Output file",
    default="results_density.csv",
)
@click.option("-v", "--verbose", count=True, default=1)
def main(
    redis_password: str,
    head_address: str,
    infile: click.Path,
    outfile: click.Path,
    verbose,
):
    hnd = logging.StreamHandler()
    logger.addHandler(hnd)
    if verbose == 0:
        logger.setLevel(logging.ERROR)
    elif verbose == 1:
        logger.setLevel(logging.WARNING)
    elif verbose == 2:
        logger.setLevel(logging.INFO)
    elif verbose > 2:
        logger.setLevel(logging.DEBUG)

    ray.init(address=head_address, _redis_password=redis_password)

    df = pd.read_csv(infile)
    t_chr = 25
    df = df[df["t_chr"] == t_chr]

    # scale parameter optimized for two nodes
    scale_2nodes = df[df["n_nodes"] == 2]["scale"].iat[0]
    # scale parameter optimized for density rho=1
    scale_rho1 = df[df["n_nodes"] == t_chr]["scale"].iat[0]

    futures = list()
    for n_nodes in df["n_nodes"].unique():
        # scale parameter optimized for real density
        scale_clairvoyant = df[df["n_nodes"] == n_nodes]["scale"].iat[0]

        futures.append(job.remote(scale_2nodes, t_chr, n_nodes, "2nodes"))
        futures.append(job.remote(scale_rho1, t_chr, n_nodes, "rho1"))
        futures.append(job.remote(scale_clairvoyant, t_chr, n_nodes, "clairvoyant"))

    logger.info(f"Running {len(futures)} jobs")
    results = ray.get(futures)

    df = pd.DataFrame(results)
    df.to_csv(outfile, index=False)


if __name__ == "__main__":
    main()
import numpy


def find_dyn_parm_deps(dof, parm_num, regressor_func):
    '''
    Find dynamic parameter dependencies (i.e., regressor column dependencies).
    '''

    samples = 10000
    decimals = 10  # rounding precision (renamed from `round`, which shadowed the builtin)
    pi = numpy.pi

    # Stack the regressor evaluated at random joint positions/velocities/accelerations
    Z = numpy.zeros((dof * samples, parm_num))

    for i in range(samples):
        q = [float(numpy.random.random() * 2.0 * pi - pi) for j in range(dof)]
        dq = [float(numpy.random.random() * 2.0 * pi - pi) for j in range(dof)]
        ddq = [float(numpy.random.random() * 2.0 * pi - pi) for j in range(dof)]
        Z[i * dof: i * dof + dof, :] = numpy.matrix(
            regressor_func(q, dq, ddq)).reshape(dof, parm_num)

    # Diagonal of R from a QR decomposition; (near-)zero entries mark dependent
    # columns.  `mode='r'` replaces the long-deprecated `mode='economic'` and
    # yields the same diagonal.
    R1_diag = numpy.linalg.qr(Z, mode='r').diagonal().round(decimals)
    dbi = []   # indices of independent (base) columns
    ddi = []   # indices of dependent columns
    for i, e in enumerate(R1_diag):
        if e != 0:
            dbi.append(i)
        else:
            ddi.append(i)
    dbn = len(dbi)

    # Permutation that puts the base columns first
    P = numpy.mat(numpy.eye(parm_num))[:, dbi + ddi]
    Pb = P[:, :dbn]
    Pd = P[:, dbn:]

    # Express the dependent columns as linear combinations of the base ones
    Rbd1 = numpy.mat(numpy.linalg.qr(Z * P, mode='r'))
    Rb1 = Rbd1[:dbn, :dbn]
    Rd1 = Rbd1[:dbn, dbn:]

    Kd = numpy.mat((numpy.linalg.inv(Rb1) * Rd1).round(decimals))

    return Pb, Pd, Kd
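# A usage sketch (not part of the original module). `regressor_func(q, dq, ddq)`
# is assumed to return a dof x parm_num regressor matrix, e.g. one generated by
# a tool such as SymPyBotics; the 1-DoF toy model below is made up. With the
# outputs, the base parameters are then typically beta = Pb.T * pi + Kd * (Pd.T * pi).
def _toy_regressor(q, dq, ddq):
    # columns: an inertia-like term and a gravity-like term
    return [[ddq[0], numpy.sin(q[0])]]

if __name__ == "__main__":
    Pb, Pd, Kd = find_dyn_parm_deps(dof=1, parm_num=2, regressor_func=_toy_regressor)
    print(Pb.shape, Pd.shape, Kd.shape)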
import unittest import random import Spy.SpyStreamHelper as StreamHelper import Spy.SpyInst as SpyInst from Spy.SpyStreamHelper import IntStreamHelper,\ FloatStreamHelper,\ StringStreamHelper,\ BitsStreamHelper,\ DArrayStreamHelper,\ ClassStreamHelper from Spy.SpyInst import SpyDArrayInst class SubDemo(object): def __init__(self): super().__init__() self.a = 0 self.b = 0 def pack(self,field=0,unuse=None): stream_a = StreamHelper.IntStreamHelper().pack(1,self.a) stream_b = StreamHelper.FloatStreamHelper().pack(2,self.b) stream = stream_a + stream_b + b'' return StreamHelper.ClassStreamHelper().stream_assembler(field,stream) def unpack(self,dat): stream = StreamHelper.ClassStreamHelper() stream_dict = stream.stream_splitter(dat) for k,v in stream_dict.items(): if k==1: self.a = StreamHelper.IntStreamHelper().unpack(v) if k==2: self.b = StreamHelper.FloatStreamHelper().unpack(v) return self class Demo(object): def __init__(self): super().__init__() self.a = 0 self.b = 0 self.c = "asdf" self.d = 0 self.e = SubDemo() self.f = SpyInst.SpyDArrayInst(StreamHelper.FloatStreamHelper,2.4) def pack(self,field=0,unuse=None): stream_a = StreamHelper.IntStreamHelper().pack(1,self.a) stream_b = StreamHelper.FloatStreamHelper().pack(2,self.b) stream_c = StreamHelper.StringStreamHelper().pack(3,self.c) stream_d = StreamHelper.BitsStreamHelper(16).pack(4,self.d) stream_e = StreamHelper.ClassStreamHelper().pack(5,self.e) stream_f = StreamHelper.DArrayStreamHelper().pack(6,self.f) stream = stream_a + stream_b + stream_c + stream_d + stream_e + stream_f + b'' return StreamHelper.ClassStreamHelper().stream_assembler(field,stream) def unpack(self,dat): stream = StreamHelper.ClassStreamHelper() stream_dict = stream.stream_splitter(dat) for k,v in stream_dict.items(): if k==1: self.a = StreamHelper.IntStreamHelper().unpack(v) if k==2: self.b = StreamHelper.FloatStreamHelper().unpack(v) if k==3: self.c = StreamHelper.StringStreamHelper().unpack(v) if k==4: self.d = StreamHelper.BitsStreamHelper(16).unpack(v) if k==5: self.e = StreamHelper.ClassStreamHelper(self.e).unpack(v) if k==6: self.f = StreamHelper.DArrayStreamHelper(self.f).unpack(v) return self class TestStreamHelper(unittest.TestCase): def run_once(self,helper,src): stream = helper().pack(0,src) des = helper().unpack(stream) self.assertEqual(src,des) class TestIntStreamHelper(TestStreamHelper): Helper = IntStreamHelper def test_base(self): self.run_once(self.Helper,3) def test_random(self): repeat = 100 for _ in range(repeat): self.run_once(self.Helper,random.randint(0,100000)) class TestFloatStreamHelper(TestStreamHelper): Helper = FloatStreamHelper def test_base(self): self.run_once(self.Helper,3.2) def test_random(self): repeat = 100 for _ in range(repeat): self.run_once(self.Helper,random.uniform(0,100000)) class TestStringStreamHelper(TestStreamHelper): Helper = StringStreamHelper def test_base(self): self.run_once(self.Helper,"test") def test_random(self): repeat = 100 sets = [chr(i) for i in range(0,127)] for _ in range(repeat): string = "".join([random.choice(sets) for x in range(random.randint(0,1000))]) self.run_once(self.Helper,string) class TestBitsStreamHelper(TestStreamHelper): Helper = BitsStreamHelper def run_once(self,helper,width,src): stream = helper(width).pack(0,src) des = helper(width).unpack(stream) self.assertEqual(src,des) def test_base(self): self.run_once(self.Helper,16,200+100*256) def test_random(self): repeat = 100 width = random.randint(0,511) value = random.randint(0,2**width-1) for _ in range(repeat): 
# regenerate width and value on every iteration; computing them once
            # outside the loop made all 100 repeats exercise the same case
            width = random.randint(1, 511)
            value = random.randint(0, 2 ** width - 1)
            self.run_once(self.Helper, width, value)


class TestDArrayStreamHelper(TestStreamHelper):
    Helper = DArrayStreamHelper

    # def run_once(self,helper,src):
    #     print(src)
    #     stream = helper().pack(0,src)
    #     des = helper().unpack(stream)
    #     self.assertEqual(src,des)

    def test_base(self):
        src = SpyDArrayInst(IntStreamHelper,1,2)
        stream = self.Helper().pack(0,src)
        des = SpyDArrayInst(IntStreamHelper,1)
        des.unpack(stream)
        self.assertEqual(self.Helper().pack(0,src),
                         self.Helper().pack(0,des))

    def test_base_with_class(self):
        src = SpyDArrayInst(ClassStreamHelper,SubDemo(),SubDemo())
        stream = self.Helper().pack(0,src)
        des = SpyDArrayInst(ClassStreamHelper,SubDemo())
        des.unpack(stream)
        self.assertEqual(self.Helper().pack(0,src),
                         self.Helper().pack(0,des))


class TestClassStreamHelper(TestStreamHelper):
    Helper = ClassStreamHelper

    def test_base(self):
        src = SubDemo()
        src.a = 3
        stream = src.pack(0)
        des = SubDemo()
        des.unpack(stream)
        self.assertEqual(src.pack(0),des.pack(0))

    def test_base_level2(self):
        src = Demo()
        src.a = 3
        src.e.a = 10
        stream = src.pack(0)
        des = Demo()
        des.unpack(stream)
        #print(src,des)
        src.pack(0)
        des.pack(0)
        self.assertEqual(src.pack(0),des.pack(0))
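# A quick roundtrip sketch (mirrors what TestClassStreamHelper verifies above):
# pack a Demo into a byte stream and rebuild an equivalent object from it.
def _demo_roundtrip():
    src = Demo()
    src.a, src.b, src.c = 7, 1.5, "hello"
    stream = src.pack(0)
    des = Demo().unpack(stream)
    assert src.pack(0) == des.pack(0)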
from abc import ABCMeta, abstractmethod import numpy as np import GPy from scipydirect import minimize class BO(object): __metaclass__ = ABCMeta def __init__(self, gp_model, f): self.gp_model = gp_model self.f = f @abstractmethod def acquire(self, x): pass def acquire_minus(self, x): res = -1 * self.acquire(x) return res def next_input(self): res = minimize(self.acquire_minus, self.f.bounds, maxf=self.f.dim * 1000, algmethod=1) return res['x']
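# A minimal concrete acquisition sketch (not part of the original module),
# assuming `gp_model` follows GPy's API where `predict(X)` returns a
# (mean, variance) pair; the beta default is an arbitrary example value.
class UCB(BO):
    """Upper Confidence Bound acquisition: mu(x) + beta * sigma(x)."""

    def __init__(self, gp_model, f, beta=2.0):
        super().__init__(gp_model, f)
        self.beta = beta

    def acquire(self, x):
        mu, var = self.gp_model.predict(np.atleast_2d(x))
        return float(mu + self.beta * np.sqrt(var))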
# -*- coding: utf-8 -*- """ @contact: lishulong.never@gmail.com @time: 2018/4/10 下午5:38 """
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys from subprocess import check_output, call, STDOUT from distutils import log from distutils.core import Command from setuptools import setup, find_packages from setuptools.command.develop import develop from setuptools.command.sdist import sdist from setuptools.command.install import install from setuptools.command.bdist_egg import bdist_egg from setuptools.command.egg_info import egg_info ROOT = os.path.join(os.path.dirname(os.path.realpath(__file__))) DEVNULL = open(os.devnull, "wb") # Not sure bower was a great idea... VENDOR_TO_KEEP = [ "burpui/static/vendor/bootswatch/slate/bootstrap.min.css", "burpui/static/vendor/bootswatch/fonts/glyphicons-halflings-regular.eot", "burpui/static/vendor/bootswatch/fonts/glyphicons-halflings-regular.svg", "burpui/static/vendor/bootswatch/fonts/glyphicons-halflings-regular.ttf", "burpui/static/vendor/bootswatch/fonts/glyphicons-halflings-regular.woff", "burpui/static/vendor/bootswatch/fonts/glyphicons-halflings-regular.woff2", "burpui/static/vendor/nvd3/build/nv.d3.min.css", "burpui/static/vendor/datatables.net-bs/css/dataTables.bootstrap.min.css", "burpui/static/vendor/datatables.net-responsive-bs/css/responsive.bootstrap.min.css", "burpui/static/vendor/datatables.net-select-bs/css/select.bootstrap.min.css", "burpui/static/vendor/datatables.net-buttons-bs/css/buttons.bootstrap.min.css", "burpui/static/vendor/datatables.net-fixedheader-bs/css/fixedHeader.bootstrap.min.css", "burpui/static/vendor/jquery.fancytree/dist/skin-bootstrap/ui.fancytree.min.css", "burpui/static/vendor/bootstrap-switch/dist/css/bootstrap3/bootstrap-switch.min.css", "burpui/static/vendor/angular-ui-select/dist/select.min.css", "burpui/static/vendor/jquery/dist/jquery.min.js", "burpui/static/vendor/jquery-ui/jquery-ui.min.js", "burpui/static/vendor/bootstrap/dist/js/bootstrap.min.js", "burpui/static/vendor/typeahead.js/dist/typeahead.jquery.min.js", "burpui/static/vendor/d3/d3.min.js", "burpui/static/vendor/nvd3/build/nv.d3.min.js", "burpui/static/vendor/datatables.net/js/jquery.dataTables.min.js", "burpui/static/vendor/datatables.net-bs/js/dataTables.bootstrap.min.js", "burpui/static/vendor/datatables.net-responsive/js/dataTables.responsive.min.js", "burpui/static/vendor/datatables.net-responsive-bs/js/responsive.bootstrap.min.js", "burpui/static/vendor/datatables.net-select/js/dataTables.select.min.js", "burpui/static/vendor/datatables.net-buttons/js/dataTables.buttons.min.js", "burpui/static/vendor/datatables.net-buttons-bs/js/buttons.bootstrap.min.js", "burpui/static/vendor/datatables.net-fixedheader/js/dataTables.fixedHeader.min.js", "burpui/static/vendor/jquery.floatThead/dist/jquery.floatThead.min.js", "burpui/static/vendor/jquery.fancytree/dist/jquery.fancytree-all.min.js", "burpui/static/vendor/jquery-file-download/src/Scripts/jquery.fileDownload.js", "burpui/static/vendor/lodash/dist/lodash.min.js", "burpui/static/vendor/angular/angular.min.js", "burpui/static/vendor/angular-route/angular-route.min.js", "burpui/static/vendor/angular-sanitize/angular-sanitize.min.js", "burpui/static/vendor/angular-resource/angular-resource.min.js", "burpui/static/vendor/angular-animate/angular-animate.min.js", "burpui/static/vendor/bootstrap-switch/dist/js/bootstrap-switch.min.js", "burpui/static/vendor/angular-bootstrap-switch/dist/angular-bootstrap-switch.min.js", "burpui/static/vendor/angular-ui-select/dist/select.min.js", "burpui/static/vendor/angular-strap/dist/angular-strap.min.js", 
"burpui/static/vendor/angular-strap/dist/angular-strap.tpl.min.js", "burpui/static/vendor/angular-onbeforeunload/build/angular-onbeforeunload.js", "burpui/static/vendor/angular-datatables-0.6.2/dist/angular-datatables.min.js", "burpui/static/vendor/angular-highlightjs/build/angular-highlightjs.min.js", "burpui/static/vendor/moment/min/moment.min.js", "burpui/static/vendor/moment/locale/fr.js", "burpui/static/vendor/moment/locale/es.js", "burpui/static/vendor/moment/locale/it.js", "burpui/static/vendor/moment-timezone/builds/moment-timezone-with-data-10-year-range.min.js", "burpui/static/vendor/angular-ui-calendar/src/calendar.js", "burpui/static/vendor/fullcalendar/dist/fullcalendar.min.css", "burpui/static/vendor/fullcalendar/dist/fullcalendar.print.min.css", "burpui/static/vendor/fullcalendar/dist/fullcalendar.min.js", "burpui/static/vendor/fullcalendar/dist/gcal.min.js", "burpui/static/vendor/fullcalendar/dist/locale/fr.js", "burpui/static/vendor/fullcalendar/dist/locale/es.js", "burpui/static/vendor/fullcalendar/dist/locale/it.js", "burpui/static/vendor/angular-bootstrap/ui-bootstrap.min.js", "burpui/static/vendor/angular-bootstrap/ui-bootstrap-tpls.min.js", "burpui/static/vendor/components-font-awesome/css/font-awesome.min.css", "burpui/static/vendor/components-font-awesome/fonts/FontAwesome.otf", "burpui/static/vendor/components-font-awesome/fonts/fontawesome-webfont.eot", "burpui/static/vendor/components-font-awesome/fonts/fontawesome-webfont.svg", "burpui/static/vendor/components-font-awesome/fonts/fontawesome-webfont.ttf", "burpui/static/vendor/components-font-awesome/fonts/fontawesome-webfont.woff", "burpui/static/vendor/components-font-awesome/fonts/fontawesome-webfont.woff2", "burpui/static/vendor/socket.io-client/dist/socket.io.min.js", "burpui/static/vendor/js-cookie/src/js.cookie.js", "burpui/static/vendor/ace-builds/src-min-noconflict/ace.js", "burpui/static/vendor/ace-builds/src-min-noconflict/mode-json.js", "burpui/static/vendor/ace-builds/src-min-noconflict/worker-json.js", "burpui/static/vendor/ace-builds/src-min-noconflict/theme-ambiance.js", ] for p in VENDOR_TO_KEEP: if not os.path.exists(p): log.info("!! 
missing: {}".format(p)) class DevelopWithBuildStatic(develop): def install_for_development(self): self.run_command("build_static") return develop.install_for_development(self) class EggWithBuildStatic(egg_info): def initialize_options(self): self.run_command("build_static") return egg_info.initialize_options(self) class BdistWithBuildStatic(bdist_egg): def initialize_options(self): self.run_command("build_static") return bdist_egg.initialize_options(self) class SdistWithBuildStatic(sdist): def make_distribution(self): self.run_command("build_static") return sdist.make_distribution(self) class PyTest(Command): user_options = [] description = "Run tests" def initialize_options(self): pass def finalize_options(self): pass def run(self): try: errno = call(["make", "test"]) raise SystemExit(errno) except OSError: log.error("Looks like the tools to run the tests are missing") class BuildStatic(Command): user_options = [] description = "Install bower dependencies" def initialize_options(self): pass def finalize_options(self): pass def run(self): os.chdir(ROOT) log.info("compiling translations") call( "{} ./burpui -m manage compile-translation".format(sys.executable).split(), stderr=DEVNULL, ) log.info("getting revision number") rev = "stable" ci = os.getenv("CI") commit = os.getenv("CI_COMMIT_SHA") if ( not ci and os.path.exists(".git") and call("which git", shell=True, stderr=STDOUT, stdout=DEVNULL) == 0 ): try: branch = check_output("git rev-parse HEAD", shell=True).rstrip() ver = open(os.path.join("burpui", "VERSION")).read().rstrip() log.info("version: {}".format(ver)) if branch and "dev" in ver: rev = branch try: log.info("revision: {}".format(rev)) with open("burpui/RELEASE", "wb") as f: f.write(rev) except: log.error("Unable to create release file") except: pass elif ci: try: ver = open(os.path.join("burpui", "VERSION")).read().rstrip() if "dev" in ver: rev = commit try: with open("burpui/RELEASE", "wb") as f: f.write(rev) except: pass except: pass else: log.info("using upstream revision") keep = VENDOR_TO_KEEP dirlist = [] for dirname, subdirs, files in os.walk("burpui/static/vendor"): for filename in files: path = os.path.join(dirname, filename) _, ext = os.path.splitext(path) if ( os.path.isfile(path) and path not in keep and filename not in ["bower.json", "package.json"] ): if (rev == "stable" and ext == ".map") or ext != ".map": os.unlink(path) elif os.path.isdir(path): dirlist.append(path) dirlist.sort(reverse=True) for d in dirlist: if os.path.isdir(d) and not os.listdir(d): os.rmdir(d) class CustomInstall(install): def run(self): self.run_command("build_static") install.run(self) def readme(): """ Function used to skip the screenshots part """ desc = "" cpt = 0 skip = False with open(os.path.join(ROOT, "README.rst")) as f: for line in f.readlines(): if line.rstrip() == "Screenshots": skip = True if skip: cpt += 1 if cpt > 6: skip = False if skip: continue desc += line return desc sys.path.insert(0, os.path.join(ROOT)) from burpui.desc import ( __author__, __author_email__, __description__, __url__, __title__, ) # noqa name = __title__ author = __author__ author_email = __author_email__ description = __description__ url = __url__ with open(os.path.join(ROOT, "requirements.txt")) as f: requires = [x.strip() for x in f if x.strip()] dev_requires = ["flake8", "pylint", "black"] test_requires = [ "pytest", "pytest-cov", "pytest-flask", "pytest-mock", "nose", "coverage", "mock", "mockredispy", "Flask-Session", "Celery", "redis", "Flask-SQLAlchemy", "Flask-Migrate", "sqlalchemy_utils", ] 
datadir = os.path.join("share", "burpui") confdir = os.path.join(datadir, "etc") contrib = os.path.join(datadir, "contrib") migrations = [ ( os.path.join(datadir, root), [os.path.join(root, f) for f in files if not f.endswith(".pyc")], ) for root, dirs, files in os.walk("migrations") ] setup( name=name, version=open(os.path.join(ROOT, "burpui", "VERSION")).read().rstrip(), description=description, long_description=readme(), license=open(os.path.join(ROOT, "LICENSE")).readline().rstrip(), author=author, author_email=author_email, url=url, keywords="burp web ui backup monitoring", packages=find_packages(), include_package_data=True, package_data={ "static": ["burpui/static/*"], "templates": ["burpui/templates/*"], "VERSION": ["burpui/VERSION"], }, entry_points={ "console_scripts": [ "burp-ui=burpui.__main__:server", "bui-celery=burpui.__main__:celery", "bui-manage=burpui.__main__:manage", "bui-agent-legacy=burpui.__main__:agent", "bui-monitor=burpui.__main__:monitor", "burp-ui-legacy=burpui.__main__:legacy", ], }, data_files=[ (confdir, [os.path.join(confdir, "burpui.sample.cfg")]), (os.path.join(contrib, "centos"), ["contrib/centos/init.sh"]), (os.path.join(contrib, "freebsd"), ["contrib/freebsd/gunicorn.rc"]), (os.path.join(contrib, "gunicorn"), ["contrib/gunicorn/burpui_gunicorn.py"]), ( os.path.join(contrib, "systemd"), [ "contrib/systemd/bui-agent.service", "contrib/systemd/bui-celery.service", "contrib/systemd/bui-celery-beat.service", "contrib/systemd/bui-gunicorn.service", "contrib/systemd/bui-monitor.service", "contrib/systemd/bui-websocket.service", ], ), ] + migrations, python_requires=">=3.6", install_requires=requires, extras_require={ "ldap_authentication": ["ldap3"], "gunicorn": ["gunicorn"], "gunicorn-extra": ["redis"], "ci": test_requires, "dev": dev_requires, "celery": ["Celery>=4.3", "redis"], "sql": ["Flask-SQLAlchemy", "Flask-Migrate>=2.1.0", "sqlalchemy-utils"], "limit": ["Flask-Limiter", "redis"], "websocket": ["flask-socketio", "redis", "gevent-websocket"], "rtd": ["sphinxcontrib-httpdomain==1.6.1"], }, tests_require=test_requires, classifiers=[ "Framework :: Flask", "Intended Audience :: System Administrators", "Natural Language :: English", "License :: OSI Approved :: BSD License", "Operating System :: POSIX :: Linux", "Programming Language :: Python", "Programming Language :: Python :: 3.6", "Topic :: System :: Archiving :: Backup", "Topic :: System :: Monitoring", ], cmdclass={ "build_static": BuildStatic, "develop": DevelopWithBuildStatic, "sdist": SdistWithBuildStatic, "install": CustomInstall, "bdist_egg": BdistWithBuildStatic, "egg_info": EggWithBuildStatic, }, )
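# Summary comment (added; derived from the definitions above): running any of
#   python setup.py sdist | install | develop | bdist_egg | egg_info
# triggers the "build_static" command first, through the cmdclass overrides
# registered in setup(). BuildStatic.run() then compiles the translations,
# records the git/CI revision in burpui/RELEASE, and prunes the bower vendor
# tree down to the files listed in VENDOR_TO_KEEP.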
from psyneulink import * import numpy as np # Mechanisms Input = TransferMechanism(name='Input') reward = TransferMechanism(output_ports=[RESULT, MEAN, VARIANCE], name='reward') Decision = DDM(function=DriftDiffusionAnalytical(drift_rate=(1.0, ControlProjection(function=Linear, control_signal_params={ ALLOCATION_SAMPLES: np.arange(0.1, 1.01, 0.3)})), threshold=(1.0, ControlProjection(function=Linear, control_signal_params={ ALLOCATION_SAMPLES: np.arange(0.1, 1.01, 0.3)})), noise=0.5, starting_point=0, t0=0.45), output_ports=[DECISION_VARIABLE, RESPONSE_TIME, PROBABILITY_UPPER_THRESHOLD], name='Decision') comp = Composition(name="evc") comp.add_node(reward, required_roles=[NodeRole.OUTPUT]) comp.add_node(Decision, required_roles=[NodeRole.OUTPUT]) task_execution_pathway = [Input, IDENTITY_MATRIX, Decision] comp.add_linear_processing_pathway(task_execution_pathway) comp.add_model_based_optimizer(optimizer=OptimizationControlMechanism(name='OCM', agent_rep=comp, features=[Input.input_port, reward.input_port], feature_function=AdaptiveIntegrator(rate=0.5), objective_mechanism=ObjectiveMechanism( name='OCM Objective Mechanism', function=LinearCombination(operation=PRODUCT), monitor=[reward, Decision.output_ports[PROBABILITY_UPPER_THRESHOLD], (Decision.output_ports[RESPONSE_TIME], -1, 1)]), function=GridSearch(), control_signals=[("drift_rate", Decision), ("threshold", Decision)]) ) comp.enable_model_based_optimizer = True stim_list_dict = { Input: [0.5, 0.123], reward: [20, 20] } comp.show_graph(show_controller=True, show_node_structure=ALL) # comp.show_graph(show_controller=True) # comp.run(inputs=stim_list_dict)
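# Usage note (added): to actually execute the model, uncomment the
# comp.run(inputs=stim_list_dict) call above; stim_list_dict supplies two
# trials of Input (0.5, 0.123) and reward (20, 20) values. After a run, the
# Composition's `results` attribute should hold the values of the OUTPUT
# nodes (reward and Decision) for each trial, with the OCM's GridSearch
# choosing the drift_rate and threshold control allocations on each trial.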
# -*- encoding:utf-8 -*-

## Tiff tag values

NewSubfileType = {
    0: "bit flag 000",
    1: "bit flag 001",
    2: "bit flag 010",
    3: "bit flag 011",
    4: "bit flag 100",
    5: "bit flag 101",
    6: "bit flag 110",
    7: "bit flag 111"
}

SubfileType = {
    1: "Full-resolution image data",
    2: "Reduced-resolution image data",
    3: "Single page of a multi-page image"
}

Compression = {
    1: "Uncompressed",
    2: "CCITT 1d",
    3: "Group 3 Fax",
    4: "Group 4 Fax",
    5: "LZW",
    6: "JPEG",
    7: "JPEG ('new-style' JPEG)",
    8: "Deflate ('Adobe-style')",
    9: "TIFF-F and TIFF-FX standard (RFC 2301) B&W",
    10: "TIFF-F and TIFF-FX standard (RFC 2301) RGB",
    32771: "CCITTRLEW",  # 16-bit padding
    32773: "PACKBITS",
    32809: "THUNDERSCAN",
    32895: "IT8CTPAD",
    32896: "IT8LW",
    32897: "IT8MP",
    32908: "PIXARFILM",
    32909: "PIXARLOG",
    32946: "DEFLATE",
    32947: "DCS",
    34661: "JBIG",
    34676: "SGILOG",
    34677: "SGILOG24",
    34712: "JP2000",
}

PhotometricInterpretation = {
    0: "WhiteIsZero",
    1: "BlackIsZero",
    2: "RGB",
    3: "RGB Palette",
    4: "Transparency Mask",
    5: "CMYK",
    6: "YCbCr",
    8: "CIE L*a*b*",
    9: "ICC L*a*b*",
    10: "ITU L*a*b*",
    32803: "CFA",  # TIFF/EP, Adobe DNG
    32892: "LinearRaw"  # Adobe DNG
}

Thresholding = {
    1: "No dithering or halftoning has been applied to the image data",
    2: "An ordered dither or halftone technique has been applied to the image data",
    3: "A randomized process such as error diffusion has been applied to the image data"
}

FillOrder = {
    1: "Values stored in the higher-order bits of the byte",
    2: "Values stored in the lower-order bits of the byte"
}

Orientation = {
    # 1 2 3 4 5 6 7 8
    # 888888 888888 88 88 8888888888 88 88 8888888888
    # 88 88 88 88 88 88 88 88 88 88 88 88
    # 8888 8888 8888 8888 88 8888888888 8888888888 88
    # 88 88 88 88
    # 88 88 888888 888888
    1: "Normal",
    2: "Flipped left to right",
    3: "Rotated 180 deg",
    4: "Flipped top to bottom",
    5: "Flipped left to right + rotated 90 deg counter clockwise",
    6: "Rotated 90 deg counter clockwise",
    7: "Flipped left to right + rotated 90 deg clockwise",
    8: "Rotated 90 deg clockwise"
}

PlanarConfiguration = {
    1: "Chunky",  # format: RGBARGBARGBA...RGBA
    2: "Planar"   # format: RR...RGG...GBB...BAA...A
}

GrayResponseUnit = {
    1: "Number represents tenths of a unit",
    2: "Number represents hundredths of a unit",
    3: "Number represents thousandths of a unit",
    4: "Number represents ten-thousandths of a unit",
    5: "Number represents hundred-thousandths of a unit"
}

ResolutionUnit = {
    1: "No unit",
    2: "Inch",
    3: "Centimeter"
}

T4Options = {
    0: "bit flag 000",
    1: "bit flag 001",
    2: "bit flag 010",
    3: "bit flag 011",
    4: "bit flag 100",
    5: "bit flag 101",
    6: "bit flag 110",
    7: "bit flag 111"
}

T6Options = {
    0: "bit flag 00",
    2: "bit flag 10",
}

Predictor = {
    1: "No prediction",
    2: "Horizontal differencing",
    3: "Floating point horizontal differencing"
}

CleanFaxData = {
    0: "No 'bad' lines",
    1: "'bad' lines exist, but were regenerated by the receiver",
    2: "'bad' lines exist, but have not been regenerated"
}

InkSet = {
    1: "CMYK",
    2: "Not CMYK"
}

SampleFormat = {
    1: "Unsigned integer data",
    2: "Two's complement signed integer data",
    3: "IEEE floating point data [IEEE]",
    4: "Undefined data format"
}

Indexed = {
    0: "Not indexed",
    1: "Indexed"
}

OPIProxy = {
    0: "A higher-resolution version of this image does not exist",
    1: "A higher-resolution version of this image exists, and the name of that image is found in the ImageID tag"
}

ProfileType = {
    0: "Unspecified",
    1: "Group 3 fax"
}

FaxProfile = {
    0: "Does not conform to a profile defined for TIFF for facsimile",
    1: "Minimal black & white lossless, Profile S",
    2: "Extended black &
white lossless, Profile F", 3: "Lossless JBIG black & white, Profile J", 4: "Lossy color and grayscale, Profile C", 5: "Lossless color and grayscale, Profile L", 6: "Mixed Raster Content, Profile M" } CodingMethods = { 0b1 : "Unspecified compression", 0b10 : "1-dimensional coding, ITU-T Rec. T.4 (MH - Modified Huffman)", 0b100 : "2-dimensional coding, ITU-T Rec. T.4 (MR - Modified Read)", 0b1000 : "2-dimensional coding, ITU-T Rec. T.6 (MMR - Modified MR)", 0b10000 : "ITU-T Rec. T.82 coding, using ITU-T Rec. T.85 (JBIG)", 0b100000 : "ITU-T Rec. T.81 (Baseline JPEG)", 0b1000000 : "ITU-T Rec. T.82 coding, using ITU-T Rec. T.43 (JBIG color)" } JPEGProc = { 1: "Baseline sequential process", 14: "Lossless process with Huffman coding" } JPEGLosslessPredictors = { 1: "A", 2: "B", 3: "C", 4: "A+B-C", 5: "A+((B-C)/2)", 6: "B+((A-C)/2)", 7: "(A+B)/2" } YCbCrSubSampling = { (0,1): "YCbCrSubsampleHoriz : ImageWidth of this chroma image is equal to the ImageWidth of the associated luma image", (0,2): "YCbCrSubsampleHoriz : ImageWidth of this chroma image is half the ImageWidth of the associated luma image", (0,4): "YCbCrSubsampleHoriz : ImageWidth of this chroma image is one-quarter the ImageWidth of the associated luma image", (1,1): "YCbCrSubsampleVert : ImageLength (height) of this chroma image is equal to the ImageLength of the associated luma image", (2,2): "YCbCrSubsampleVert : ImageLength (height) of this chroma image is half the ImageLength of the associated luma image", (4,4): "YCbCrSubsampleVert : ImageLength (height) of this chroma image is one-quarter the ImageLength of the associated luma image" } YCbCrPositioning = { 1: "Centered", 2: "Co-sited" } ## EXIF tag values ExposureProgram = { 0: "Not defined", 1: "Manual", 2: "Normal program", 3: "Aperture priority", 4: "Shutter priority", 5: "Creative program (biased toward depth of field)", 6: "Action program (biased toward fast shutter speed)", 7: "Portrait mode (for closeup photos with the background out of focus)", 8: "Landscape mode (for landscape photos with the background in focus)" } MeteringMode = { 0: "Unknown", 1: "Average", 2: "Center Weighted Average", 3: "Spot", 4: "MultiSpot", 5: "Pattern", 6: "Partial", 255: "other" } LightSource = { 0: "Unknown", 1: "Daylight", 2: "Fluorescent", 3: "Tungsten (incandescent light)", 4: "Flash", 9: "Fine weather", 10: "Cloudy weather", 11: "Shade", 12: "Daylight fluorescent (D 5700 - 7100K)", 13: "Day white fluorescent (N 4600 - 5400K)", 14: "Cool white fluorescent (W 3900 - 4500K)", 15: "White fluorescent (WW 3200 - 3700K)", 17: "Standard light A", 18: "Standard light B", 19: "Standard light C", 20: "D55", 21: "D65", 22: "D75", 23: "D50", 24: "ISO studio tungsten", 255: "Other light source" } ColorSpace = { 1: "RGB", 65535: "Uncalibrated" } Flash = { 0x0000: "Flash did not fire", 0x0001: "Flash fired", 0x0005: "Strobe return light not detected", 0x0007: "Strobe return light detected", 0x0008: "On, did not fire", 0x0009: "Flash fired, compulsory flash mode", 0x000D: "Flash fired, compulsory flash mode, return light not detected", 0x000F: "Flash fired, compulsory flash mode, return light detected", 0x0010: "Flash did not fire, compulsory flash mode", 0x0014: "Off, did not fire, return not detected", 0x0018: "Flash did not fire, auto mode", 0x0019: "Flash fired, auto mode", 0x001D: "Flash fired, auto mode, return light not detected", 0x001F: "Flash fired, auto mode, return light detected", 0x0020: "No flash function", 0x0030: "Off, no flash function", 0x0041: "Flash fired, red-eye reduction mode", 
0x0045: "Flash fired, red-eye reduction mode, return light not detected", 0x0047: "Flash fired, red-eye reduction mode, return light detected", 0x0049: "Flash fired, compulsory flash mode, red-eye reduction mode", 0x004D: "Flash fired, compulsory flash mode, red-eye reduction mode, return light not detected", 0x004F: "Flash fired, compulsory flash mode, red-eye reduction mode, return light detected", 0x0050: "Off, red-eye reduction", 0x0058: "Auto, Did not fire, red-eye reduction", 0x0059: "Flash fired, auto mode, red-eye reduction mode", 0x005D: "Flash fired, auto mode, return light not detected, red-eye reduction mode", 0x005F: "Flash fired, auto mode, return light detected, red-eye reduction mode" } FocalPlaneResolutionUnit = { 1: "No absolute unit of measurement", 2: "Inch", 3: "Centimeter" } SensingMethod = { 1: "Not defined", 2: "One-chip color area sensor", 3: "Two-chip color area sensor", 4: "Three-chip color area sensor", 5: "Color sequential area sensor", 7: "Trilinear sensor", 8: "Color sequential linear sensor" } CustomRendered = { 0: "Normal process", 1: "Custom process" } ExposureMode = { 0: "Auto exposure", 1: "Manual exposure", 2: "Auto bracket" } WhiteBalance = { 0: "Auto white balance", 1: "Manual white balance" } SceneCaptureType = { 0: "Standard", 1: "Landscape", 2: "Portrait", 3: "Night scene" } GainControl = { 0: "None", 1: "Low gain up", 2: "High gain up", 3: "Low gain down", 4: "High gain down" } Contrast = { 0: "Normal", 1: "Soft", 2: "Hard" } Saturation = { 0: "Normal", 1: "Low saturation", 2: "High saturation" } Sharpness = Contrast SubjectDistanceRange = { 0: "Unknown", 1: "Macro", 2: "Close view", 3: "Distant view" } ## GPS tag values GPSAltitudeRef = { 0: "Above sea level", 1: "Below sea level" } GPSMeasureMode = { b'2': "2-dimensional measurement", b'3': "3-dimensional measurement", b'2\x00': "2-dimensional measurement", b'3\x00': "3-dimensional measurement" } GPSSpeedRef = { b'K': "Kilometers per hour", b'M': "Miles per hour", b'N': "Knots", b'K\x00': "Kilometers per hour", b'M\x00': "Miles per hour", b'N\x00': "Knots" } GPSTrackRef = { b'T': "True direction", b'M': "Magnetic direction", b'T\x00': "True direction", b'M\x00': "Magnetic direction" } GPSImgDirectionRef = GPSTrackRef GPSLatitudeRef = { b'N': "North latitude", b'S': "South latitude", b'N\x00': "North latitude", b'S\x00': "South latitude" } GPSDestLatitudeRef = GPSLatitudeRef GPSLongitudeRef = { b'E': "East longitude", b'W': "West longitude", b'E\x00': "East longitude", b'W\x00': "West longitude" } GPSDestLongitudeRef = GPSLongitudeRef GPSDestBearingRef = GPSTrackRef GPSDestDistanceRef = GPSSpeedRef GPSDifferential = { 0: "Measurement without differential correction", 1: "Differential correction applied" } ## Geotiff tag values GTModelTypeGeoKey = { 0: "Undefined", 1: "Projection Coordinate System", 2: "Geographic (latitude,longitude) System", 3: "Geocentric (X,Y,Z) Coordinate System", } GTRasterTypeGeoKey = { 1: "Raster pixel is area", 2: "Raster pixel is point", } # load from json ProjCoordTransGeoKey = { 1: "CT_TransverseMercator", 2: "CT_TransvMercator_Modified_Alaska", 3: "CT_ObliqueMercator", 4: "CT_ObliqueMercator_Laborde", 5: "CT_ObliqueMercator_Rosenmund", 6: "CT_ObliqueMercator_Spherical", 7: "CT_Mercator", 8: "CT_LambertConfConic_2SP", 9: "CT_LambertConfConic_Helmert", 10: "CT_LambertAzimEqualArea", 11: "CT_AlbersEqualArea", 12: "CT_AzimuthalEquidistant", 13: "CT_EquidistantConic", 14: "CT_Stereographic", 15: "CT_PolarStereographic", 16: "CT_ObliqueStereographic", 17: 
"CT_Equirectangular", 18: "CT_CassiniSoldner", 19: "CT_Gnomonic", 20: "CT_MillerCylindrical", 21: "CT_Orthographic", 22: "CT_Polyconic", 23: "CT_Robinson", 24: "CT_Sinusoidal", 25: "CT_VanDerGrinten", 26: "CT_NewZealandMapGrid", 27: "CT_TransvMercator_SouthOriented", 28: "User-defined", 32767: "User-defined" } GeogPrimeMeridianGeoKey = { 8901: "Greenwich", 8902: "Lisbon", 8903: "Paris", 8904: "Bogota", 8905: "Madrid", 8906: "Rome", 8907: "Bern", 8908: "Jakarta", 8909: "Ferro", 8910: "Brussels", 8911: "Stockholm", 8912: "Athens", 8913: "Oslo", 8914: "Paris RGS" } GeogLinearUnitsGeoKey = { 1025: "millimetre", 1026: "metres per second", 1027: "millimetres per year", 1033: "centimetre", 1034: "centimetres per year", 1042: "metres per year", 9001: "metre", 9002: "foot", 9003: "US survey foot", 9005: "Clarke's foot", 9014: "fathom", 9030: "nautical mile", 9031: "German legal metre", 9033: "US survey chain", 9034: "US survey link", 9035: "US survey mile", 9036: "kilometre", 9037: "Clarke's yard", 9038: "Clarke's chain", 9039: "Clarke's link", 9040: "British yard (Sears 1922)", 9041: "British foot (Sears 1922)", 9042: "British chain (Sears 1922)", 9043: "British link (Sears 1922)", 9050: "British yard (Benoit 1895 A)", 9051: "British foot (Benoit 1895 A)", 9052: "British chain (Benoit 1895 A)", 9053: "British link (Benoit 1895 A)", 9060: "British yard (Benoit 1895 B)", 9061: "British foot (Benoit 1895 B)", 9062: "British chain (Benoit 1895 B)", 9063: "British link (Benoit 1895 B)", 9070: "British foot (1865)", 9080: "Indian foot", 9081: "Indian foot (1937)", 9082: "Indian foot (1962)", 9083: "Indian foot (1975)", 9084: "Indian yard", 9085: "Indian yard (1937)", 9086: "Indian yard (1962)", 9087: "Indian yard (1975)", 9093: "Statute mile", 9094: "Gold Coast foot", 9095: "British foot (1936)", 9096: "yard", 9097: "chain", 9098: "link", 9099: "British yard (Sears 1922 truncated)", 9204: "Bin width 330 US survey feet", 9205: "Bin width 165 US survey feet", 9206: "Bin width 82.5 US survey feet", 9207: "Bin width 37.5 metres", 9208: "Bin width 25 metres", 9209: "Bin width 12.5 metres", 9210: "Bin width 6.25 metres", 9211: "Bin width 3.125 metres", 9300: "British foot (Sears 1922 truncated)", 9301: "British chain (Sears 1922 truncated)", 9302: "British link (Sears 1922 truncated)" } GeogAngularUnitsGeoKey = { 1031: "milliarc-second", 1032: "milliarc-seconds per year", 1035: "radians per second", 1043: "arc-seconds per year", 9101: "radian", 9102: "degree", 9103: "arc-minute", 9104: "arc-second", 9105: "grad", 9106: "gon", 9107: "degree minute second", 9108: "degree minute second hemisphere", 9109: "microradian", 9110: "sexagesimal DMS", 9111: "sexagesimal DM", 9112: "centesimal minute", 9113: "centesimal second", 9114: "mil_6400", 9115: "degree minute", 9116: "degree hemisphere", 9117: "hemisphere degree", 9118: "degree minute hemisphere", 9119: "hemisphere degree minute", 9120: "hemisphere degree minute second", 9121: "sexagesimal DMS.s", 9122: "degree (supplier to define representation)" } GeogAzimuthUnitsGeoKey = GeogAngularUnitsGeoKey ProjLinearUnitsGeoKey = GeogLinearUnitsGeoKey VerticalUnitsGeoKey = GeogLinearUnitsGeoKey GeogEllipsoidGeoKey = { 1024: "CGCS2000", 7001: "Airy 1830", 7002: "Airy Modified 1849", 7003: "Australian National Spheroid", 7004: "Bessel 1841", 7005: "Bessel Modified", 7006: "Bessel Namibia", 7007: "Clarke 1858", 7008: "Clarke 1866", 7009: "Clarke 1866 Michigan", 7010: "Clarke 1880 (Benoit)", 7011: "Clarke 1880 (IGN)", 7012: "Clarke 1880 (RGS)", 7013: "Clarke 1880 (Arc)", 7014: 
"Clarke 1880 (SGA 1922)", 7015: "Everest 1830 (1937 Adjustment)", 7016: "Everest 1830 (1967 Definition)", 7018: "Everest 1830 Modified", 7019: "GRS 1980", 7020: "Helmert 1906", 7021: "Indonesian National Spheroid", 7022: "International 1924", 7024: "Krassowsky 1940", 7025: "NWL 9D", 7027: "Plessis 1817", 7028: "Struve 1860", 7029: "War Office", 7030: "WGS 84", 7031: "GEM 10C", 7032: "OSU86F", 7033: "OSU91A", 7034: "Clarke 1880", 7035: "Sphere", 7036: "GRS 1967", 7041: "Average Terrestrial System 1977", 7042: "Everest (1830 Definition)", 7043: "WGS 72", 7044: "Everest 1830 (1962 Definition)", 7045: "Everest 1830 (1975 Definition)", 7046: "Bessel Namibia (GLM)", 7047: "GRS 1980 Authalic Sphere", 7048: "GRS 1980 Authalic Sphere", 7049: "IAG 1975", 7050: "GRS 1967 Modified", 7051: "Danish 1876", 7052: "Clarke 1866 Authalic Sphere", 7053: "Hough 1960", 7054: "PZ-90", 7055: "Clarke 1880 (international foot)", 7056: "Everest 1830 (RSO 1969)", 7057: "International 1924 Authalic Sphere", 7058: "Hughes 1980", 7059: "Popular Visualisation Sphere", 32767: "User-defined" } GeogGeodeticDatumGeoKey = { 1024: "Hungarian Datum 1909", 1025: "Taiwan Datum 1967", 1026: "Taiwan Datum 1997", 1029: "Iraqi Geospatial Reference System", 1031: "MGI 1901", 1032: "MOLDREF99", 1033: "Reseau Geodesique de la RDC 2005", 1034: "Serbian Reference Network 1998", 1035: "Red Geodesica de Canarias 1995", 1036: "Reseau Geodesique de Mayotte 2004", 1037: "Cadastre 1997", 1038: "Reseau Geodesique de Saint Pierre et Miquelon 2006", 1041: "Autonomous Regions of Portugal 2008", 1042: "Mexico ITRF92", 1043: "China 2000", 1044: "Sao Tome", 1045: "New Beijing", 1046: "Principe", 1047: "Reseau de Reference des Antilles Francaises 1991", 1048: "Tokyo 1892", 1052: "System Jednotne Trigonometricke Site Katastralni/05", 1053: "Sri Lanka Datum 1999", 1055: "System Jednotne Trigonometricke Site Katastralni/05 (Ferro)", 1056: "Geocentric Datum Brunei Darussalam 2009", 1057: "Turkish National Reference Frame", 1058: "Bhutan National Geodetic Datum", 1060: "Islands Net 2004", 1061: "International Terrestrial Reference Frame 2008", 1062: "Posiciones Geodesicas Argentinas 2007", 1063: "Marco Geodesico Nacional", 1064: "SIRGAS-Chile", 1065: "Costa Rica 2005", 1066: "Sistema Geodesico Nacional de Panama MACARIO SOLIS", 1067: "Peru96", 1068: "SIRGAS-ROU98", 1069: "SIRGAS_ES2007.8", 1070: "Ocotepeque 1935", 1071: "Sibun Gorge 1922", 1072: "Panama-Colon 1911", 1073: "Reseau Geodesique des Antilles Francaises 2009", 1074: "Corrego Alegre 1961", 1075: "South American Datum 1969(96)", 1076: "Papua New Guinea Geodetic Datum 1994", 1077: "Ukraine 2000", 1078: "Fehmarnbelt Datum 2010", 1081: "Deutsche Bahn Reference System", 1095: "Tonga Geodetic Datum 2005", 1100: "Cayman Islands Geodetic Datum 2011", 1111: "Nepal 1981", 1112: "Cyprus Geodetic Reference System 1993", 1113: "Reseau Geodesique des Terres Australes et Antarctiques Francaises 2007", 1114: "Israeli Geodetic Datum 2005", 1115: "Israeli Geodetic Datum 2005(2012)", 1116: "NAD83 (National Spatial Reference System 2011)", 1117: "NAD83 (National Spatial Reference System PA11)", 1118: "NAD83 (National Spatial Reference System MA11)", 1120: "Mexico ITRF2008", 1128: "Japanese Geodetic Datum 2011", 1132: "Rete Dinamica Nazionale 2008", 1133: "NAD83 (Continuously Operating Reference Station 1996)", 1135: "Aden 1925", 1136: "Bioko", 1137: "Bekaa Valley 1920", 1138: "South East Island 1943", 1139: "Gambia", 1141: "IGS08", 1142: "IG05 Intermediate Datum", 1143: "Israeli Geodetic Datum 2005", 1144: "IG05/12 
Intermediate Datum", 1145: "Israeli Geodetic Datum 2005(2012)", 1147: "Oman National Geodetic Datum 2014", 1160: "Kyrgyzstan Geodetic Datum 2006", 6001: "Not specified (based on Airy 1830 ellipsoid)", 6002: "Not specified (based on Airy Modified 1849 ellipsoid)", 6003: "Not specified (based on Australian National Spheroid)", 6004: "Not specified (based on Bessel 1841 ellipsoid)", 6005: "Not specified (based on Bessel Modified ellipsoid)", 6006: "Not specified (based on Bessel Namibia ellipsoid)", 6007: "Not specified (based on Clarke 1858 ellipsoid)", 6008: "Not specified (based on Clarke 1866 ellipsoid)", 6009: "Not specified (based on Clarke 1866 Michigan ellipsoid)", 6010: "Not specified (based on Clarke 1880 (Benoit) ellipsoid)", 6011: "Not specified (based on Clarke 1880 (IGN) ellipsoid)", 6012: "Not specified (based on Clarke 1880 (RGS) ellipsoid)", 6013: "Not specified (based on Clarke 1880 (Arc) ellipsoid)", 6014: "Not specified (based on Clarke 1880 (SGA 1922) ellipsoid)", 6015: "Not specified (based on Everest 1830 (1937 Adjustment) ellipsoid)", 6016: "Not specified (based on Everest 1830 (1967 Definition) ellipsoid)", 6018: "Not specified (based on Everest 1830 Modified ellipsoid)", 6019: "Not specified (based on GRS 1980 ellipsoid)", 6020: "Not specified (based on Helmert 1906 ellipsoid)", 6021: "Not specified (based on Indonesian National Spheroid)", 6022: "Not specified (based on International 1924 ellipsoid)", 6024: "Not specified (based on Krassowsky 1940 ellipsoid)", 6025: "Not specified (based on NWL 9D ellipsoid)", 6027: "Not specified (based on Plessis 1817 ellipsoid)", 6028: "Not specified (based on Struve 1860 ellipsoid)", 6029: "Not specified (based on War Office ellipsoid)", 6030: "Not specified (based on WGS 84 ellipsoid)", 6031: "Not specified (based on GEM 10C ellipsoid)", 6032: "Not specified (based on OSU86F ellipsoid)", 6033: "Not specified (based on OSU91A ellipsoid)", 6034: "Not specified (based on Clarke 1880 ellipsoid)", 6035: "Not specified (based on Authalic Sphere)", 6036: "Not specified (based on GRS 1967 ellipsoid)", 6041: "Not specified (based on Average Terrestrial System 1977 ellipsoid)", 6042: "Not specified (based on Everest (1830 Definition) ellipsoid)", 6043: "Not specified (based on WGS 72 ellipsoid)", 6044: "Not specified (based on Everest 1830 (1962 Definition) ellipsoid)", 6045: "Not specified (based on Everest 1830 (1975 Definition) ellipsoid)", 6047: "Not specified (based on GRS 1980 Authalic Sphere)", 6052: "Not specified (based on Clarke 1866 Authalic Sphere)", 6053: "Not specified (based on International 1924 Authalic Sphere)", 6054: "Not specified (based on Hughes 1980 ellipsoid)", 6055: "Popular Visualisation Datum", 6120: "Greek", 6121: "Greek Geodetic Reference System 1987", 6122: "Average Terrestrial System 1977", 6123: "Kartastokoordinaattijarjestelma (1966)", 6124: "Rikets koordinatsystem 1990", 6125: "Samboja", 6126: "Lithuania 1994 (ETRS89)", 6127: "Tete", 6128: "Madzansua", 6129: "Observatario", 6130: "Moznet (ITRF94)", 6131: "Indian 1960", 6132: "Final Datum 1958", 6133: "Estonia 1992", 6134: "PDO Survey Datum 1993", 6135: "Old Hawaiian", 6136: "St. Lawrence Island", 6137: "St. Paul Island", 6138: "St. 
George Island", 6139: "Puerto Rico", 6140: "NAD83 Canadian Spatial Reference System", 6141: "Israel 1993", 6142: "Locodjo 1965", 6143: "Abidjan 1987", 6144: "Kalianpur 1937", 6145: "Kalianpur 1962", 6146: "Kalianpur 1975", 6147: "Hanoi 1972", 6148: "Hartebeesthoek94", 6149: "CH1903", 6150: "CH1903+", 6151: "Swiss Terrestrial Reference Frame 1995", 6152: "NAD83 (High Accuracy Reference Network)", 6153: "Rassadiran", 6154: "European Datum 1950(1977)", 6155: "Dabola 1981", 6156: "System Jednotne Trigonometricke Site Katastralni", 6157: "Mount Dillon", 6158: "Naparima 1955", 6159: "European Libyan Datum 1979", 6160: "Chos Malal 1914", 6161: "Pampa del Castillo", 6162: "Korean Datum 1985", 6163: "Yemen National Geodetic Network 1996", 6164: "South Yemen", 6165: "Bissau", 6166: "Korean Datum 1995", 6167: "New Zealand Geodetic Datum 2000", 6168: "Accra", 6169: "American Samoa 1962", 6170: "Sistema de Referencia Geocentrico para America del Sur 1995", 6171: "Reseau Geodesique Francais 1993", 6172: "Posiciones Geodesicas Argentinas", 6173: "IRENET95", 6174: "Sierra Leone Colony 1924", 6175: "Sierra Leone 1968", 6176: "Australian Antarctic Datum 1998", 6178: "Pulkovo 1942(83)", 6179: "Pulkovo 1942(58)", 6180: "Estonia 1997", 6181: "Luxembourg 1930", 6182: "Azores Occidental Islands 1939", 6183: "Azores Central Islands 1948", 6184: "Azores Oriental Islands 1940", 6185: "Madeira 1936", 6188: "OSNI 1952", 6189: "Red Geodesica Venezolana", 6190: "Posiciones Geodesicas Argentinas 1998", 6191: "Albanian 1987", 6192: "Douala 1948", 6193: "Manoca 1962", 6194: "Qornoq 1927", 6195: "Scoresbysund 1952", 6196: "Ammassalik 1958", 6197: "Garoua", 6198: "Kousseri", 6199: "Egypt 1930", 6200: "Pulkovo 1995", 6201: "Adindan", 6202: "Australian Geodetic Datum 1966", 6203: "Australian Geodetic Datum 1984", 6204: "Ain el Abd 1970", 6205: "Afgooye", 6206: "Agadez", 6207: "Lisbon 1937", 6208: "Aratu", 6209: "Arc 1950", 6210: "Arc 1960", 6211: "Batavia", 6212: "Barbados 1938", 6213: "Beduaram", 6214: "Beijing 1954", 6215: "Reseau National Belge 1950", 6216: "Bermuda 1957", 6218: "Bogota 1975", 6219: "Bukit Rimpah", 6220: "Camacupa", 6221: "Campo Inchauspe", 6222: "Cape", 6223: "Carthage", 6224: "Chua", 6225: "Corrego Alegre 1970-72", 6226: "Cote d'Ivoire", 6227: "Deir ez Zor", 6228: "Douala", 6229: "Egypt 1907", 6230: "European Datum 1950", 6231: "European Datum 1987", 6232: "Fahud", 6233: "Gandajika 1970", 6234: "Garoua", 6235: "Guyane Francaise", 6236: "Hu Tzu Shan 1950", 6237: "Hungarian Datum 1972", 6238: "Indonesian Datum 1974", 6239: "Indian 1954", 6240: "Indian 1975", 6241: "Jamaica 1875", 6242: "Jamaica 1969", 6243: "Kalianpur 1880", 6244: "Kandawala", 6245: "Kertau 1968", 6246: "Kuwait Oil Company", 6247: "La Canoa", 6248: "Provisional South American Datum 1956", 6249: "Lake", 6250: "Leigon", 6251: "Liberia 1964", 6252: "Lome", 6253: "Luzon 1911", 6254: "Hito XVIII 1963", 6255: "Herat North", 6256: "Mahe 1971", 6257: "Makassar", 6258: "European Terrestrial Reference System 1989", 6259: "Malongo 1987", 6260: "Manoca", 6261: "Merchich", 6262: "Massawa", 6263: "Minna", 6264: "Mhast", 6265: "Monte Mario", 6266: "M'poraloko", 6267: "North American Datum 1927", 6268: "NAD27 Michigan", 6269: "North American Datum 1983", 6270: "Nahrwan 1967", 6271: "Naparima 1972", 6272: "New Zealand Geodetic Datum 1949", 6273: "NGO 1948", 6274: "Datum 73", 6275: "Nouvelle Triangulation Francaise", 6276: "NSWC 9Z-2", 6277: "OSGB 1936", 6278: "OSGB 1970 (SN)", 6279: "OS (SN) 1980", 6280: "Padang 1884", 6281: "Palestine 1923", 6282: "Congo 
1960 Pointe Noire", 6283: "Geocentric Datum of Australia 1994", 6284: "Pulkovo 1942", 6285: "Qatar 1974", 6286: "Qatar 1948", 6287: "Qornoq", 6288: "Loma Quintana", 6289: "Amersfoort", 6291: "South American Datum 1969", 6292: "Sapper Hill 1943", 6293: "Schwarzeck", 6294: "Segora", 6295: "Serindung", 6296: "Sudan", 6297: "Tananarive 1925", 6298: "Timbalai 1948", 6299: "TM65", 6300: "Geodetic Datum of 1965", 6301: "Tokyo", 6302: "Trinidad 1903", 6303: "Trucial Coast 1948", 6304: "Voirol 1875", 6306: "Bern 1938", 6307: "Nord Sahara 1959", 6308: "Stockholm 1938", 6309: "Yacare", 6310: "Yoff", 6311: "Zanderij", 6312: "Militar-Geographische Institut", 6313: "Reseau National Belge 1972", 6314: "Deutsches Hauptdreiecksnetz", 6315: "Conakry 1905", 6316: "Dealul Piscului 1930", 6317: "Dealul Piscului 1970", 6318: "National Geodetic Network", 6319: "Kuwait Utility", 6322: "World Geodetic System 1972", 6324: "WGS 72 Transit Broadcast Ephemeris", 6326: "World Geodetic System 1984", 6600: "Anguilla 1957", 6601: "Antigua 1943", 6602: "Dominica 1945", 6603: "Grenada 1953", 6604: "Montserrat 1958", 6605: "St. Kitts 1955", 6606: "St. Lucia 1955", 6607: "St. Vincent 1945", 6608: "North American Datum 1927 (1976)", 6609: "North American Datum 1927 (CGQ77)", 6610: "Xian 1980", 6611: "Hong Kong 1980", 6612: "Japanese Geodetic Datum 2000", 6613: "Gunung Segara", 6614: "Qatar National Datum 1995", 6615: "Porto Santo 1936", 6616: "Selvagem Grande", 6618: "South American Datum 1969", 6619: "SWEREF99", 6620: "Point 58", 6621: "Fort Marigot", 6622: "Guadeloupe 1948", 6623: "Centre Spatial Guyanais 1967", 6624: "Reseau Geodesique Francais Guyane 1995", 6625: "Martinique 1938", 6626: "Reunion 1947", 6627: "Reseau Geodesique de la Reunion 1992", 6628: "Tahiti 52", 6629: "Tahaa 54", 6630: "IGN72 Nuku Hiva", 6631: "K0 1949", 6632: "Combani 1950", 6633: "IGN56 Lifou", 6634: "IGN72 Grande Terre", 6635: "ST87 Ouvea", 6636: "Petrels 1972", 6637: "Pointe Geologie Perroud 1950", 6638: "Saint Pierre et Miquelon 1950", 6639: "MOP78", 6640: "Reseau de Reference des Antilles Francaises 1991", 6641: "IGN53 Mare", 6642: "ST84 Ile des Pins", 6643: "ST71 Belep", 6644: "NEA74 Noumea", 6645: "Reseau Geodesique Nouvelle Caledonie 1991", 6646: "Grand Comoros", 6647: "International Terrestrial Reference Frame 1988", 6648: "International Terrestrial Reference Frame 1989", 6649: "International Terrestrial Reference Frame 1990", 6650: "International Terrestrial Reference Frame 1991", 6651: "International Terrestrial Reference Frame 1992", 6652: "International Terrestrial Reference Frame 1993", 6653: "International Terrestrial Reference Frame 1994", 6654: "International Terrestrial Reference Frame 1996", 6655: "International Terrestrial Reference Frame 1997", 6656: "International Terrestrial Reference Frame 2000", 6657: "Reykjavik 1900", 6658: "Hjorsey 1955", 6659: "Islands Net 1993", 6660: "Helle 1954", 6661: "Latvia 1992", 6663: "Porto Santo 1995", 6664: "Azores Oriental Islands 1995", 6665: "Azores Central Islands 1995", 6666: "Lisbon 1890", 6667: "Iraq-Kuwait Boundary Datum 1992", 6668: "European Datum 1979", 6670: "Istituto Geografico Militaire 1995", 6671: "Voirol 1879", 6672: "Chatham Islands Datum 1971", 6673: "Chatham Islands Datum 1979", 6674: "Sistema de Referencia Geocentrico para las AmericaS 2000", 6675: "Guam 1963", 6676: "Vientiane 1982", 6677: "Lao 1993", 6678: "Lao National Datum 1997", 6679: "Jouik 1961", 6680: "Nouakchott 1965", 6681: "Mauritania 1999", 6682: "Gulshan 303", 6683: "Philippine Reference System 1992", 6684: 
"Gan 1970", 6685: "Gandajika", 6686: "Marco Geocentrico Nacional de Referencia", 6687: "Reseau Geodesique de la Polynesie Francaise", 6688: "Fatu Iva 72", 6689: "IGN63 Hiva Oa", 6690: "Tahiti 79", 6691: "Moorea 87", 6692: "Maupiti 83", 6693: "Nakhl-e Ghanem", 6694: "Posiciones Geodesicas Argentinas 1994", 6695: "Katanga 1955", 6696: "Kasai 1953", 6697: "IGC 1962 Arc of the 6th Parallel South", 6698: "IGN 1962 Kerguelen", 6699: "Le Pouce 1934", 6700: "IGN Astro 1960", 6701: "Institut Geographique du Congo Belge 1955", 6702: "Mauritania 1999", 6703: "Missao Hidrografico Angola y Sao Tome 1951", 6704: "Mhast (onshore)", 6705: "Mhast (offshore)", 6706: "Egypt Gulf of Suez S-650 TL", 6707: "Tern Island 1961", 6708: "Cocos Islands 1965", 6709: "Iwo Jima 1945", 6710: "St. Helena 1971", 6711: "Marcus Island 1952", 6712: "Ascension Island 1958", 6713: "Ayabelle Lighthouse", 6714: "Bellevue", 6715: "Camp Area Astro", 6716: "Phoenix Islands 1966", 6717: "Cape Canaveral", 6718: "Solomon 1968", 6719: "Easter Island 1967", 6720: "Fiji Geodetic Datum 1986", 6721: "Fiji 1956", 6722: "South Georgia 1968", 6723: "Grand Cayman Geodetic Datum 1959", 6724: "Diego Garcia 1969", 6725: "Johnston Island 1961", 6726: "Sister Islands Geodetic Datum 1961", 6727: "Midway 1961", 6728: "Pico de las Nieves 1984", 6729: "Pitcairn 1967", 6730: "Santo 1965", 6731: "Viti Levu 1916", 6732: "Marshall Islands 1960", 6733: "Wake Island 1952", 6734: "Tristan 1968", 6735: "Kusaie 1951", 6736: "Deception Island", 6737: "Geocentric datum of Korea", 6738: "Hong Kong 1963", 6739: "Hong Kong 1963(67)", 6740: "Parametrop Zemp 1990", 6741: "Faroe Datum 1954", 6742: "Geodetic Datum of Malaysia 2000", 6743: "Karbala 1979", 6744: "Nahrwan 1934", 6745: "Rauenberg Datum/83", 6746: "Potsdam Datum/83", 6747: "Greenland 1996", 6748: "Vanua Levu 1915", 6749: "Reseau Geodesique de Nouvelle Caledonie 91-93", 6750: "ST87 Ouvea", 6751: "Kertau (RSO)", 6752: "Viti Levu 1912", 6753: "fk89", 6754: "Libyan Geodetic Datum 2006", 6755: "Datum Geodesi Nasional 1995", 6756: "Vietnam 2000", 6757: "SVY21", 6758: "Jamaica 2001", 6759: "NAD83 (National Spatial Reference System 2007)", 6760: "World Geodetic System 1966", 6761: "Croatian Terrestrial Reference System", 6762: "Bermuda 2000", 6763: "Pitcairn 2006", 6764: "Ross Sea Region Geodetic Datum 2000", 6765: "Slovenia Geodetic Datum 1996", 6801: "CH1903 (Bern)", 6802: "Bogota 1975 (Bogota)", 6803: "Lisbon 1937 (Lisbon)", 6804: "Makassar (Jakarta)", 6805: "Militar-Geographische Institut (Ferro)", 6806: "Monte Mario (Rome)", 6807: "Nouvelle Triangulation Francaise (Paris)", 6808: "Padang 1884 (Jakarta)", 6809: "Reseau National Belge 1950 (Brussels)", 6810: "Tananarive 1925 (Paris)", 6811: "Voirol 1875 (Paris)", 6813: "Batavia (Jakarta)", 6814: "Stockholm 1938 (Stockholm)", 6815: "Greek (Athens)", 6816: "Carthage (Paris)", 6817: "NGO 1948 (Oslo)", 6818: "System Jednotne Trigonometricke Site Katastralni (Ferro)", 6819: "Nord Sahara 1959 (Paris)", 6820: "Gunung Segara (Jakarta)", 6821: "Voirol 1879 (Paris)", 6896: "International Terrestrial Reference Frame 2005", 6901: "Ancienne Triangulation Francaise (Paris)", 6902: "Nord de Guerre (Paris)", 6903: "Madrid 1870 (Madrid)", 6904: "Lisbon 1890 (Lisbon)", 32767: "User-defined" } VerticalDatumGeoKey = { 1027: "EGM2008 geoid", 1028: "Fao 1979", 1030: "N2000", 1039: "New Zealand Vertical Datum 2009", 1040: "Dunedin-Bluff 1960", 1049: "Incheon", 1050: "Trieste", 1051: "Genoa", 1054: "Sri Lanka Vertical Datum", 1059: "Faroe Islands Vertical Reference 2009", 1079: 
"Fehmarnbelt Vertical Reference 2010", 1080: "Lowest Astronomic Tide", 1082: "Highest Astronomic Tide", 1083: "Lower Low Water Large Tide", 1084: "Higher High Water Large Tide", 1085: "Indian Spring Low Water", 1086: "Mean Lower Low Water Spring Tides", 1087: "Mean Low Water Spring Tides", 1088: "Mean High Water Spring Tides", 1089: "Mean Lower Low Water", 1090: "Mean Higher High Water", 1091: "Mean Low Water", 1092: "Mean High Water", 1093: "Low Water", 1094: "High Water", 1096: "Norway Normal Null 2000", 1097: "Grand Cayman Vertical Datum 1954", 1098: "Little Cayman Vertical Datum 1961", 1099: "Cayman Brac Vertical Datum 1961", 1101: "Cais da Pontinha - Funchal", 1102: "Cais da Vila - Porto Santo", 1103: "Cais das Velas", 1104: "Horta", 1105: "Cais da Madalena", 1106: "Santa Cruz da Graciosa", 1107: "Cais da Figueirinha - Angra do Heroismo", 1108: "Santa Cruz das Flores", 1109: "Cais da Vila do Porto", 1110: "Ponta Delgada", 1119: "Northern Marianas Vertical Datum of 2003", 1121: "Tutuila Vertical Datum of 1962", 1122: "Guam Vertical Datum of 1963", 1123: "Puerto Rico Vertical Datum of 2002", 1124: "Virgin Islands Vertical Datum of 2009", 1125: "American Samoa Vertical Datum of 2002", 1126: "Guam Vertical Datum of 2004", 1127: "Canadian Geodetic Vertical Datum of 2013", 1129: "Japanese Standard Levelling Datum 1972", 1130: "Japanese Geodetic Datum 2000 (vertical)", 1131: "Japanese Geodetic Datum 2011 (vertical)", 1140: "Singapore Height Datum", 1146: "Ras Ghumays", 1148: "Famagusta 1960", 1149: "PNG08", 1150: "Kumul 34", 1151: "Kiunga", 1161: "Deutsches Haupthoehennetz 1912", 1162: "Latvian Height System 2000", 5100: "Mean Sea Level", 5101: "Ordnance Datum Newlyn", 5102: "National Geodetic Vertical Datum 1929", 5103: "North American Vertical Datum 1988", 5104: "Yellow Sea 1956", 5105: "Baltic Sea", 5106: "Caspian Sea", 5107: "Nivellement general de la France", 5109: "Normaal Amsterdams Peil", 5110: "Ostend", 5111: "Australian Height Datum", 5112: "Australian Height Datum (Tasmania)", 5113: "Instantaneous Water Level", 5114: "Canadian Geodetic Vertical Datum of 1928", 5115: "Piraeus Harbour 1986", 5116: "Helsinki 1960", 5117: "Rikets hojdsystem 1970", 5118: "Nivellement General de la France - Lallemand", 5119: "Nivellement General de la France - IGN69", 5120: "Nivellement General de la France - IGN78", 5121: "Maputo", 5122: "Japanese Standard Levelling Datum 1969", 5123: "PDO Height Datum 1993", 5124: "Fahud Height Datum", 5125: "Ha Tien 1960", 5126: "Hon Dau 1992", 5127: "Landesnivellement 1902", 5128: "Landeshohennetz 1995", 5129: "European Vertical Reference Frame 2000", 5130: "Malin Head", 5131: "Belfast Lough", 5132: "Dansk Normal Nul", 5133: "AIOC 1995", 5134: "Black Sea", 5135: "Hong Kong Principal Datum", 5136: "Hong Kong Chart Datum", 5137: "Yellow Sea 1985", 5138: "Ordnance Datum Newlyn (Orkney Isles)", 5139: "Fair Isle", 5140: "Lerwick", 5141: "Foula", 5142: "Sule Skerry", 5143: "North Rona", 5144: "Stornoway", 5145: "St Kilda", 5146: "Flannan Isles", 5147: "St Marys", 5148: "Douglas", 5149: "Fao", 5150: "Bandar Abbas", 5151: "Nivellement General de Nouvelle Caledonie", 5152: "Poolbeg", 5153: "Nivellement General Guyanais 1977", 5154: "Martinique 1987", 5155: "Guadeloupe 1988", 5156: "Reunion 1989", 5157: "Auckland 1946", 5158: "Bluff 1955", 5159: "Dunedin 1958", 5160: "Gisborne 1926", 5161: "Lyttelton 1937", 5162: "Moturiki 1953", 5163: "Napier 1962", 5164: "Nelson 1955", 5165: "One Tree Point 1964", 5166: "Tararu 1952", 5167: "Taranaki 1970", 5168: "Wellington 1953", 5169: 
"Waitangi (Chatham Island) 1959", 5170: "Stewart Island 1977", 5171: "EGM96 geoid", 5172: "Nivellement General du Luxembourg", 5173: "Antalya", 5174: "Norway Normal Null 1954", 5175: "Durres", 5176: "Gebrauchshohen ADRIA", 5177: "National Vertical Network 1999", 5178: "Cascais", 5179: "Constanta", 5180: "Alicante", 5181: "Deutsches Haupthoehennetz 1992", 5182: "Deutsches Haupthoehennetz 1985", 5183: "Staatlichen Nivellementnetzes 1976", 5184: "Baltic 1982", 5185: "Baltic 1980", 5186: "Kuwait PWD", 5187: "KOC Well Datum", 5188: "KOC Construction Datum", 5189: "Nivellement General de la Corse 1948", 5190: "Danger 1950", 5191: "Mayotte 1950", 5192: "Martinique 1955", 5193: "Guadeloupe 1951", 5194: "Lagos 1955", 5195: "Nivellement General de Polynesie Francaise", 5196: "IGN 1966", 5197: "Moorea SAU 1981", 5198: "Raiatea SAU 2001", 5199: "Maupiti SAU 2001", 5200: "Huahine SAU 2001", 5201: "Tahaa SAU 2001", 5202: "Bora Bora SAU 2001", 5203: "EGM84 geoid", 5204: "International Great Lakes Datum 1955", 5205: "International Great Lakes Datum 1985", 5206: "Dansk Vertikal Reference 1990", 5207: "Croatian Vertical Reference System 1971", 5208: "Rikets hojdsystem 2000", 5209: "Rikets hojdsystem 1900", 5210: "IGN 1988 LS", 5211: "IGN 1988 MG", 5212: "IGN 1992 LD", 5213: "IGN 1988 SB", 5214: "IGN 1988 SM", 5215: "European Vertical Reference Frame 2007", 32767: "User-defined" } VerticalCSTypeGeoKey = { 3855: "EGM2008 height", 3886: "Fao 1979 height", 3900: "N2000 height", 4440: "NZVD2009 height", 4458: "Dunedin-Bluff 1960 height", 5193: "Incheon height", 5195: "Trieste height", 5214: "Genoa height", 5237: "SLVD height", 5317: "FVR09 height", 5336: "Black Sea depth", 5597: "FCSVR10 height", 5600: "NGPF height", 5601: "IGN 1966 height", 5602: "Moorea SAU 1981 height", 5603: "Raiatea SAU 2001 height", 5604: "Maupiti SAU 2001 height", 5605: "Huahine SAU 2001 height", 5606: "Tahaa SAU 2001 height", 5607: "Bora Bora SAU 2001 height", 5608: "IGLD 1955 height", 5609: "IGLD 1985 height", 5610: "HVRS71 height", 5611: "Caspian height", 5612: "Baltic depth", 5613: "RH2000 height", 5614: "KOC WD depth (ft)", 5615: "RH00 height", 5616: "IGN 1988 LS height", 5617: "IGN 1988 MG height", 5618: "IGN 1992 LD height", 5619: "IGN 1988 SB height", 5620: "IGN 1988 SM height", 5621: "EVRF2007 height", 5701: "ODN height", 5702: "NGVD29 height", 5703: "NAVD88 height", 5704: "Yellow Sea", 5705: "Baltic height", 5706: "Caspian depth", 5709: "NAP height", 5710: "Ostend height", 5711: "AHD height", 5712: "AHD (Tasmania) height", 5713: "CGVD28 height", 5714: "MSL height", 5715: "MSL depth", 5716: "Piraeus height", 5717: "N60 height", 5718: "RH70 height", 5719: "NGF Lallemand height", 5720: "NGF-IGN69 height", 5721: "NGF-IGN78 height", 5722: "Maputo height", 5723: "JSLD69 height", 5724: "PHD93 height", 5725: "Fahud HD height", 5726: "Ha Tien 1960 height", 5727: "Hon Dau 1992 height", 5728: "LN02 height", 5729: "LHN95 height", 5730: "EVRF2000 height", 5731: "Malin Head height", 5732: "Belfast height", 5733: "DNN height", 5734: "AIOC95 depth", 5735: "Black Sea height", 5736: "Yellow Sea 1956 height", 5737: "Yellow Sea 1985 height", 5738: "HKPD height", 5739: "HKCD depth", 5740: "ODN Orkney height", 5741: "Fair Isle height", 5742: "Lerwick height", 5743: "Foula height", 5744: "Sule Skerry height", 5745: "North Rona height", 5746: "Stornoway height", 5747: "St Kilda height", 5748: "Flannan Isles height", 5749: "St Marys height", 5750: "Douglas height", 5751: "Fao height", 5752: "Bandar Abbas height", 5753: "NGNC height", 5754: "Poolbeg 
height", 5755: "NGG1977 height", 5756: "Martinique 1987 height", 5757: "Guadeloupe 1988 height", 5758: "Reunion 1989 height", 5759: "Auckland 1946 height", 5760: "Bluff 1955 height", 5761: "Dunedin 1958 height", 5762: "Gisborne 1926 height", 5763: "Lyttelton 1937 height", 5764: "Moturiki 1953 height", 5765: "Napier 1962 height", 5766: "Nelson 1955 height", 5767: "One Tree Point 1964 height", 5768: "Tararu 1952 height", 5769: "Taranaki 1970 height", 5770: "Wellington 1953 height", 5771: "Chatham Island 1959 height", 5772: "Stewart Island 1977 height", 5773: "EGM96 height", 5774: "NG-L height", 5775: "Antalya height", 5776: "NN54 height", 5777: "Durres height", 5778: "GHA height", 5779: "NVN99 height", 5780: "Cascais height", 5781: "Constanta height", 5782: "Alicante height", 5783: "DHHN92 height", 5784: "DHHN85 height", 5785: "SNN76 height", 5786: "Baltic 1982 height", 5787: "EOMA 1980 height", 5788: "Kuwait PWD height", 5789: "KOC WD depth", 5790: "KOC CD height", 5791: "NGC 1948 height", 5792: "Danger 1950 height", 5793: "Mayotte 1950 height", 5794: "Martinique 1955 height", 5795: "Guadeloupe 1951 height", 5796: "Lagos 1955 height", 5797: "AIOC95 height", 5798: "EGM84 height", 5799: "DVR90 height", 5829: "Instantaneous Water Level height", 5831: "Instantaneous Water Level depth", 5843: "Ras Ghumays height", 5861: "LAT depth", 5862: "LLWLT depth", 5863: "ISLW depth", 5864: "MLLWS depth", 5865: "MLWS depth", 5866: "MLLW depth", 5867: "MLW depth", 5868: "MHW height", 5869: "MHHW height", 5870: "MHWS height", 5871: "HHWLT height", 5872: "HAT height", 5873: "Low Water depth", 5874: "High Water height", 5941: "NN2000 height", 6130: "GCVD54 height", 6131: "LCVD61 height", 6132: "CBVD61 height", 6178: "Cais da Pontinha - Funchal height", 6179: "Cais da Vila - Porto Santo height", 6180: "Cais das Velas height", 6181: "Horta height", 6182: "Cais da Madalena height", 6183: "Santa Cruz da Graciosa height", 6184: "Cais da Figueirinha - Angra do Heroismo height", 6185: "Santa Cruz das Flores height", 6186: "Cais da Vila do Porto height", 6187: "Ponta Delgada height", 6357: "NAVD88 depth", 6358: "NAVD88 depth (ftUS)", 6359: "NGVD29 depth", 6360: "NAVD88 height (ftUS)", 6638: "Tutuila 1962 height", 6639: "Guam 1963 height", 6640: "NMVD03 height", 6641: "PRVD02 height", 6642: "VIVD09 height", 6643: "ASVD02 height", 6644: "GUVD04 height", 6647: "CGVD2013 height", 6693: "JSLD72 height", 6694: "JGD2000 (vertical) height", 6695: "JGD2011 (vertical) height", 6916: "SHD height", 7446: "Famagusta 1960 height", 7447: "PNG08 height", 7651: "Kumul 34 height", 7652: "Kiunga height", 7699: "DHHN12 height", 7700: "Latvia 2000 height", 32767: "User-defined" } GeographicTypeGeoKey = { 3819: "HD1909", 3821: "TWD67", 3824: "TWD97", 3889: "IGRS", 3906: "MGI 1901", 4001: "Unknown datum based upon the Airy 1830 ellipsoid", 4002: "Unknown datum based upon the Airy Modified 1849 ellipsoid", 4003: "Unknown datum based upon the Australian National Spheroid", 4004: "Unknown datum based upon the Bessel 1841 ellipsoid", 4005: "Unknown datum based upon the Bessel Modified ellipsoid", 4006: "Unknown datum based upon the Bessel Namibia ellipsoid", 4007: "Unknown datum based upon the Clarke 1858 ellipsoid", 4008: "Unknown datum based upon the Clarke 1866 ellipsoid", 4009: "Unknown datum based upon the Clarke 1866 Michigan ellipsoid", 4010: "Unknown datum based upon the Clarke 1880 (Benoit) ellipsoid", 4011: "Unknown datum based upon the Clarke 1880 (IGN) ellipsoid", 4012: "Unknown datum based upon the Clarke 1880 (RGS) ellipsoid", 
4013: "Unknown datum based upon the Clarke 1880 (Arc) ellipsoid", 4014: "Unknown datum based upon the Clarke 1880 (SGA 1922) ellipsoid", 4015: "Unknown datum based upon the Everest 1830 (1937 Adjustment) ellipsoid", 4016: "Unknown datum based upon the Everest 1830 (1967 Definition) ellipsoid", 4018: "Unknown datum based upon the Everest 1830 Modified ellipsoid", 4019: "Unknown datum based upon the GRS 1980 ellipsoid", 4020: "Unknown datum based upon the Helmert 1906 ellipsoid", 4021: "Unknown datum based upon the Indonesian National Spheroid", 4022: "Unknown datum based upon the International 1924 ellipsoid", 4023: "MOLDREF99", 4024: "Unknown datum based upon the Krassowsky 1940 ellipsoid", 4025: "Unknown datum based upon the NWL 9D ellipsoid", 4027: "Unknown datum based upon the Plessis 1817 ellipsoid", 4028: "Unknown datum based upon the Struve 1860 ellipsoid", 4029: "Unknown datum based upon the War Office ellipsoid", 4030: "Unknown datum based upon the WGS 84 ellipsoid", 4031: "Unknown datum based upon the GEM 10C ellipsoid", 4032: "Unknown datum based upon the OSU86F ellipsoid", 4033: "Unknown datum based upon the OSU91A ellipsoid", 4034: "Unknown datum based upon the Clarke 1880 ellipsoid", 4035: "Unknown datum based upon the Authalic Sphere", 4036: "Unknown datum based upon the GRS 1967 ellipsoid", 4041: "Unknown datum based upon the Average Terrestrial System 1977 ellipsoid", 4042: "Unknown datum based upon the Everest (1830 Definition) ellipsoid", 4043: "Unknown datum based upon the WGS 72 ellipsoid", 4044: "Unknown datum based upon the Everest 1830 (1962 Definition) ellipsoid", 4045: "Unknown datum based upon the Everest 1830 (1975 Definition) ellipsoid", 4046: "RGRDC 2005", 4047: "Unspecified datum based upon the GRS 1980 Authalic Sphere", 4052: "Unspecified datum based upon the Clarke 1866 Authalic Sphere", 4053: "Unspecified datum based upon the International 1924 Authalic Sphere", 4054: "Unspecified datum based upon the Hughes 1980 ellipsoid", 4055: "Popular Visualisation CRS", 4075: "SREF98", 4081: "REGCAN95", 4120: "Greek", 4121: "GGRS87", 4122: "ATS77", 4123: "KKJ", 4124: "RT90", 4125: "Samboja", 4126: "LKS94 (ETRS89)", 4127: "Tete", 4128: "Madzansua", 4129: "Observatario", 4130: "Moznet", 4131: "Indian 1960", 4132: "FD58", 4133: "EST92", 4134: "PSD93", 4135: "Old Hawaiian", 4136: "St. Lawrence Island", 4137: "St. Paul Island", 4138: "St. 
George Island", 4139: "Puerto Rico", 4140: "NAD83(CSRS98)", 4141: "Israel 1993", 4142: "Locodjo 1965", 4143: "Abidjan 1987", 4144: "Kalianpur 1937", 4145: "Kalianpur 1962", 4146: "Kalianpur 1975", 4147: "Hanoi 1972", 4148: "Hartebeesthoek94", 4149: "CH1903", 4150: "CH1903+", 4151: "CHTRF95", 4152: "NAD83(HARN)", 4153: "Rassadiran", 4154: "ED50(ED77)", 4155: "Dabola 1981", 4156: "S-JTSK", 4157: "Mount Dillon", 4158: "Naparima 1955", 4159: "ELD79", 4160: "Chos Malal 1914", 4161: "Pampa del Castillo", 4162: "Korean 1985", 4163: "Yemen NGN96", 4164: "South Yemen", 4165: "Bissau", 4166: "Korean 1995", 4167: "NZGD2000", 4168: "Accra", 4169: "American Samoa 1962", 4170: "SIRGAS 1995", 4171: "RGF93", 4172: "POSGAR", 4173: "IRENET95", 4174: "Sierra Leone 1924", 4175: "Sierra Leone 1968", 4176: "Australian Antarctic", 4178: "Pulkovo 1942(83)", 4179: "Pulkovo 1942(58)", 4180: "EST97", 4181: "Luxembourg 1930", 4182: "Azores Occidental 1939", 4183: "Azores Central 1948", 4184: "Azores Oriental 1940", 4185: "Madeira 1936", 4188: "OSNI 1952", 4189: "REGVEN", 4190: "POSGAR 98", 4191: "Albanian 1987", 4192: "Douala 1948", 4193: "Manoca 1962", 4194: "Qornoq 1927", 4195: "Scoresbysund 1952", 4196: "Ammassalik 1958", 4197: "Garoua", 4198: "Kousseri", 4199: "Egypt 1930", 4200: "Pulkovo 1995", 4201: "Adindan", 4202: "AGD66", 4203: "AGD84", 4204: "Ain el Abd", 4205: "Afgooye", 4206: "Agadez", 4207: "Lisbon", 4208: "Aratu", 4209: "Arc 1950", 4210: "Arc 1960", 4211: "Batavia", 4212: "Barbados 1938", 4213: "Beduaram", 4214: "Beijing 1954", 4215: "Belge 1950", 4216: "Bermuda 1957", 4218: "Bogota 1975", 4219: "Bukit Rimpah", 4220: "Camacupa", 4221: "Campo Inchauspe", 4222: "Cape", 4223: "Carthage", 4224: "Chua", 4225: "Corrego Alegre 1970-72", 4226: "Cote d'Ivoire", 4227: "Deir ez Zor", 4228: "Douala", 4229: "Egypt 1907", 4230: "ED50", 4231: "ED87", 4232: "Fahud", 4233: "Gandajika 1970", 4234: "Garoua", 4235: "Guyane Francaise", 4236: "Hu Tzu Shan 1950", 4237: "HD72", 4238: "ID74", 4239: "Indian 1954", 4240: "Indian 1975", 4241: "Jamaica 1875", 4242: "JAD69", 4243: "Kalianpur 1880", 4244: "Kandawala", 4245: "Kertau 1968", 4246: "KOC", 4247: "La Canoa", 4248: "PSAD56", 4249: "Lake", 4250: "Leigon", 4251: "Liberia 1964", 4252: "Lome", 4253: "Luzon 1911", 4254: "Hito XVIII 1963", 4255: "Herat North", 4256: "Mahe 1971", 4257: "Makassar", 4258: "ETRS89", 4259: "Malongo 1987", 4260: "Manoca", 4261: "Merchich", 4262: "Massawa", 4263: "Minna", 4264: "Mhast", 4265: "Monte Mario", 4266: "M'poraloko", 4267: "NAD27", 4268: "NAD27 Michigan", 4269: "NAD83", 4270: "Nahrwan 1967", 4271: "Naparima 1972", 4272: "NZGD49", 4273: "NGO 1948", 4274: "Datum 73", 4275: "NTF", 4276: "NSWC 9Z-2", 4277: "OSGB 1936", 4278: "OSGB70", 4279: "OS(SN)80", 4280: "Padang", 4281: "Palestine 1923", 4282: "Pointe Noire", 4283: "GDA94", 4284: "Pulkovo 1942", 4285: "Qatar 1974", 4286: "Qatar 1948", 4287: "Qornoq", 4288: "Loma Quintana", 4289: "Amersfoort", 4291: "SAD69", 4292: "Sapper Hill 1943", 4293: "Schwarzeck", 4294: "Segora", 4295: "Serindung", 4296: "Sudan", 4297: "Tananarive", 4298: "Timbalai 1948", 4299: "TM65", 4300: "TM75", 4301: "Tokyo", 4302: "Trinidad 1903", 4303: "TC(1948)", 4304: "Voirol 1875", 4306: "Bern 1938", 4307: "Nord Sahara 1959", 4308: "RT38", 4309: "Yacare", 4310: "Yoff", 4311: "Zanderij", 4312: "MGI", 4313: "Belge 1972", 4314: "DHDN", 4315: "Conakry 1905", 4316: "Dealul Piscului 1930", 4317: "Dealul Piscului 1970", 4318: "NGN", 4319: "KUDAMS", 4322: "WGS 72", 4324: "WGS 72BE", 4326: "WGS 84", 4463: "RGSPM06", 4470: "RGM04", 4475: 
"Cadastre 1997", 4483: "Mexico ITRF92", 4490: "China Geodetic Coordinate System 2000", 4555: "New Beijing", 4558: "RRAF 1991", 4600: "Anguilla 1957", 4601: "Antigua 1943", 4602: "Dominica 1945", 4603: "Grenada 1953", 4604: "Montserrat 1958", 4605: "St. Kitts 1955", 4606: "St. Lucia 1955", 4607: "St. Vincent 1945", 4608: "NAD27(76)", 4609: "NAD27(CGQ77)", 4610: "Xian 1980", 4611: "Hong Kong 1980", 4612: "JGD2000", 4613: "Segara", 4614: "QND95", 4615: "Porto Santo", 4616: "Selvagem Grande", 4617: "NAD83(CSRS)", 4618: "SAD69", 4619: "SWEREF99", 4620: "Point 58", 4621: "Fort Marigot", 4622: "Guadeloupe 1948", 4623: "CSG67", 4624: "RGFG95", 4625: "Martinique 1938", 4626: "Reunion 1947", 4627: "RGR92", 4628: "Tahiti 52", 4629: "Tahaa 54", 4630: "IGN72 Nuku Hiva", 4631: "K0 1949", 4632: "Combani 1950", 4633: "IGN56 Lifou", 4634: "IGN72 Grand Terre", 4635: "ST87 Ouvea", 4636: "Petrels 1972", 4637: "Perroud 1950", 4638: "Saint Pierre et Miquelon 1950", 4639: "MOP78", 4640: "RRAF 1991", 4641: "IGN53 Mare", 4642: "ST84 Ile des Pins", 4643: "ST71 Belep", 4644: "NEA74 Noumea", 4645: "RGNC 1991", 4646: "Grand Comoros", 4657: "Reykjavik 1900", 4658: "Hjorsey 1955", 4659: "ISN93", 4660: "Helle 1954", 4661: "LKS92", 4662: "IGN72 Grande Terre", 4663: "Porto Santo 1995", 4664: "Azores Oriental 1995", 4665: "Azores Central 1995", 4666: "Lisbon 1890", 4667: "IKBD-92", 4668: "ED79", 4669: "LKS94", 4670: "IGM95", 4671: "Voirol 1879", 4672: "Chatham Islands 1971", 4673: "Chatham Islands 1979", 4674: "SIRGAS 2000", 4675: "Guam 1963", 4676: "Vientiane 1982", 4677: "Lao 1993", 4678: "Lao 1997", 4679: "Jouik 1961", 4680: "Nouakchott 1965", 4681: "Mauritania 1999", 4682: "Gulshan 303", 4683: "PRS92", 4684: "Gan 1970", 4685: "Gandajika", 4686: "MAGNA-SIRGAS", 4687: "RGPF", 4688: "Fatu Iva 72", 4689: "IGN63 Hiva Oa", 4690: "Tahiti 79", 4691: "Moorea 87", 4692: "Maupiti 83", 4693: "Nakhl-e Ghanem", 4694: "POSGAR 94", 4695: "Katanga 1955", 4696: "Kasai 1953", 4697: "IGC 1962 6th Parallel South", 4698: "IGN 1962 Kerguelen", 4699: "Le Pouce 1934", 4700: "IGN Astro 1960", 4701: "IGCB 1955", 4702: "Mauritania 1999", 4703: "Mhast 1951", 4704: "Mhast (onshore)", 4705: "Mhast (offshore)", 4706: "Egypt Gulf of Suez S-650 TL", 4707: "Tern Island 1961", 4708: "Cocos Islands 1965", 4709: "Iwo Jima 1945", 4710: "St. 
Helena 1971", 4711: "Marcus Island 1952", 4712: "Ascension Island 1958", 4713: "Ayabelle Lighthouse", 4714: "Bellevue", 4715: "Camp Area Astro", 4716: "Phoenix Islands 1966", 4717: "Cape Canaveral", 4718: "Solomon 1968", 4719: "Easter Island 1967", 4720: "Fiji 1986", 4721: "Fiji 1956", 4722: "South Georgia 1968", 4723: "GCGD59", 4724: "Diego Garcia 1969", 4725: "Johnston Island 1961", 4726: "SIGD61", 4727: "Midway 1961", 4728: "Pico de las Nieves 1984", 4729: "Pitcairn 1967", 4730: "Santo 1965", 4731: "Viti Levu 1916", 4732: "Marshall Islands 1960", 4733: "Wake Island 1952", 4734: "Tristan 1968", 4735: "Kusaie 1951", 4736: "Deception Island", 4737: "Korea 2000", 4738: "Hong Kong 1963", 4739: "Hong Kong 1963(67)", 4740: "PZ-90", 4741: "FD54", 4742: "GDM2000", 4743: "Karbala 1979", 4744: "Nahrwan 1934", 4745: "RD/83", 4746: "PD/83", 4747: "GR96", 4748: "Vanua Levu 1915", 4749: "RGNC91-93", 4750: "ST87 Ouvea", 4751: "Kertau (RSO)", 4752: "Viti Levu 1912", 4753: "fk89", 4754: "LGD2006", 4755: "DGN95", 4756: "VN-2000", 4757: "SVY21", 4758: "JAD2001", 4759: "NAD83(NSRS2007)", 4760: "WGS 66", 4761: "HTRS96", 4762: "BDA2000", 4763: "Pitcairn 2006", 4764: "RSRGD2000", 4765: "Slovenia 1996", 4801: "Bern 1898 (Bern)", 4802: "Bogota 1975 (Bogota)", 4803: "Lisbon (Lisbon)", 4804: "Makassar (Jakarta)", 4805: "MGI (Ferro)", 4806: "Monte Mario (Rome)", 4807: "NTF (Paris)", 4808: "Padang (Jakarta)", 4809: "Belge 1950 (Brussels)", 4810: "Tananarive (Paris)", 4811: "Voirol 1875 (Paris)", 4813: "Batavia (Jakarta)", 4814: "RT38 (Stockholm)", 4815: "Greek (Athens)", 4816: "Carthage (Paris)", 4817: "NGO 1948 (Oslo)", 4818: "S-JTSK (Ferro)", 4819: "Nord Sahara 1959 (Paris)", 4820: "Segara (Jakarta)", 4821: "Voirol 1879 (Paris)", 4823: "Sao Tome", 4824: "Principe", 4901: "ATF (Paris)", 4902: "NDG (Paris)", 4903: "Madrid 1870 (Madrid)", 4904: "Lisbon 1890 (Lisbon)", 5013: "PTRA08", 5132: "Tokyo 1892", 5228: "S-JTSK/05", 5229: "S-JTSK/05 (Ferro)", 5233: "SLD99", 5246: "GDBD2009", 5252: "TUREF", 5264: "DRUKREF 03", 5324: "ISN2004", 5340: "POSGAR 2007", 5354: "MARGEN", 5360: "SIRGAS-Chile", 5365: "CR05", 5371: "MACARIO SOLIS", 5373: "Peru96", 5381: "SIRGAS-ROU98", 5393: "SIRGAS_ES2007.8", 5451: "Ocotepeque 1935", 5464: "Sibun Gorge 1922", 5467: "Panama-Colon 1911", 5489: "RGAF09", 5524: "Corrego Alegre 1961", 5527: "SAD69(96)", 5546: "PNG94", 5561: "UCS-2000", 5593: "FEH2010", 5681: "DB_REF", 5886: "TGD2005", 6135: "CIGD11", 6207: "Nepal 1981", 6311: "CGRS93", 6318: "NAD83(2011)", 6322: "NAD83(PA11)", 6325: "NAD83(MA11)", 6365: "Mexico ITRF2008", 6668: "JGD2011", 6706: "RDN2008", 6783: "NAD83(CORS96)", 6881: "Aden 1925", 6882: "Bekaa Valley 1920", 6883: "Bioko", 6892: "South East Island 1943", 6894: "Gambia", 6980: "IGD05", 6983: "IG05 Intermediate CRS", 6987: "IGD05/12", 6990: "IG05/12 Intermediate CRS", 7035: "RGSPM06 (lon-lat)", 7037: "RGR92 (lon-lat)", 7039: "RGM04 (lon-lat)", 7041: "RGFG95 (lon-lat)", 7073: "RGTAAF07", 7084: "RGF93 (lon-lat)", 7086: "RGAF09 (lon-lat)", 7088: "RGTAAF07 (lon-lat)", 7133: "RGTAAF07 (lon-lat)", 7136: "IGD05", 7139: "IGD05/12", 7373: "ONGD14", 7686: "Kyrg-06", 32767: "User-defined", 61206405: "Greek (deg)", 61216405: "GGRS87 (deg)", 61226405: "ATS77 (deg)", 61236405: "KKJ (deg)", 61246405: "RT90 (deg)", 61266405: "LKS94 (ETRS89) (deg)", 61266413: "LKS94 (ETRS89) (3D deg)", 61276405: "Tete (deg)", 61286405: "Madzansua (deg)", 61296405: "Observatario (deg)", 61306405: "Moznet (deg)", 61316405: "Indian 1960 (deg)", 61326405: "FD58 (deg)", 61336405: "EST92 (deg)", 61346405: "PDO Survey 
Datum 1993 (deg)", 61356405: "Old Hawaiian (deg)", 61366405: "St. Lawrence Island (deg)", 61376405: "St. Paul Island (deg)", 61386405: "St. George Island (deg)", 61396405: "Puerto Rico (deg)", 61406405: "NAD83(CSRS) (deg)", 61416405: "Israel (deg)", 61426405: "Locodjo 1965 (deg)", 61436405: "Abidjan 1987 (deg)", 61446405: "Kalianpur 1937 (deg)", 61456405: "Kalianpur 1962 (deg)", 61466405: "Kalianpur 1975 (deg)", 61476405: "Hanoi 1972 (deg)", 61486405: "Hartebeesthoek94 (deg)", 61496405: "CH1903 (deg)", 61506405: "CH1903+ (deg)", 61516405: "CHTRF95 (deg)", 61526405: "NAD83(HARN) (deg)", 61536405: "Rassadiran (deg)", 61546405: "ED50(ED77) (deg)", 61556405: "Dabola 1981 (deg)", 61566405: "S-JTSK (deg)", 61576405: "Mount Dillon (deg)", 61586405: "Naparima 1955 (deg)", 61596405: "ELD79 (deg)", 61606405: "Chos Malal 1914 (deg)", 61616405: "Pampa del Castillo (deg)", 61626405: "Korean 1985 (deg)", 61636405: "Yemen NGN96 (deg)", 61646405: "South Yemen (deg)", 61656405: "Bissau (deg)", 61666405: "Korean 1995 (deg)", 61676405: "NZGD2000 (deg)", 61686405: "Accra (deg)", 61696405: "American Samoa 1962 (deg)", 61706405: "SIRGAS (deg)", 61716405: "RGF93 (deg)", 61736405: "IRENET95 (deg)", 61746405: "Sierra Leone 1924 (deg)", 61756405: "Sierra Leone 1968 (deg)", 61766405: "Australian Antarctic (deg)", 61786405: "Pulkovo 1942(83) (deg)", 61796405: "Pulkovo 1942(58) (deg)", 61806405: "EST97 (deg)", 61816405: "Luxembourg 1930 (deg)", 61826405: "Azores Occidental 1939 (deg)", 61836405: "Azores Central 1948 (deg)", 61846405: "Azores Oriental 1940 (deg)", 61886405: "OSNI 1952 (deg)", 61896405: "REGVEN (deg)", 61906405: "POSGAR 98 (deg)", 61916405: "Albanian 1987 (deg)", 61926405: "Douala 1948 (deg)", 61936405: "Manoca 1962 (deg)", 61946405: "Qornoq 1927 (deg)", 61956405: "Scoresbysund 1952 (deg)", 61966405: "Ammassalik 1958 (deg)", 61976405: "Garoua (deg)", 61986405: "Kousseri (deg)", 61996405: "Egypt 1930 (deg)", 62006405: "Pulkovo 1995 (deg)", 62016405: "Adindan (deg)", 62026405: "AGD66 (deg)", 62036405: "AGD84 (deg)", 62046405: "Ain el Abd (deg)", 62056405: "Afgooye (deg)", 62066405: "Agadez (deg)", 62076405: "Lisbon (deg)", 62086405: "Aratu (deg)", 62096405: "Arc 1950 (deg)", 62106405: "Arc 1960 (deg)", 62116405: "Batavia (deg)", 62126405: "Barbados 1938 (deg)", 62136405: "Beduaram (deg)", 62146405: "Beijing 1954 (deg)", 62156405: "Belge 1950 (deg)", 62166405: "Bermuda 1957 (deg)", 62186405: "Bogota 1975 (deg)", 62196405: "Bukit Rimpah (deg)", 62206405: "Camacupa (deg)", 62216405: "Campo Inchauspe (deg)", 62226405: "Cape (deg)", 62236405: "Carthage (deg)", 62246405: "Chua (deg)", 62256405: "Corrego Alegre (deg)", 62276405: "Deir ez Zor (deg)", 62296405: "Egypt 1907 (deg)", 62306405: "ED50 (deg)", 62316405: "ED87 (deg)", 62326405: "Fahud (deg)", 62336405: "Gandajika 1970 (deg)", 62366405: "Hu Tzu Shan (deg)", 62376405: "HD72 (deg)", 62386405: "ID74 (deg)", 62396405: "Indian 1954 (deg)", 62406405: "Indian 1975 (deg)", 62416405: "Jamaica 1875 (deg)", 62426405: "JAD69 (deg)", 62436405: "Kalianpur 1880 (deg)", 62446405: "Kandawala (deg)", 62456405: "Kertau (deg)", 62466405: "KOC (deg)", 62476405: "La Canoa (deg)", 62486405: "PSAD56 (deg)", 62496405: "Lake (deg)", 62506405: "Leigon (deg)", 62516405: "Liberia 1964 (deg)", 62526405: "Lome (deg)", 62536405: "Luzon 1911 (deg)", 62546405: "Hito XVIII 1963 (deg)", 62556405: "Herat North (deg)", 62566405: "Mahe 1971 (deg)", 62576405: "Makassar (deg)", 62586405: "ETRS89 (deg)", 62596405: "Malongo 1987 (deg)", 62616405: "Merchich (deg)", 62626405: "Massawa (deg)", 
62636405: "Minna (deg)", 62646405: "Mhast (deg)", 62656405: "Monte Mario (deg)", 62666405: "M'poraloko (deg)", 62676405: "NAD27 (deg)", 62686405: "NAD27 Michigan (deg)", 62696405: "NAD83 (deg)", 62706405: "Nahrwan 1967 (deg)", 62716405: "Naparima 1972 (deg)", 62726405: "NZGD49 (deg)", 62736405: "NGO 1948 (deg)", 62746405: "Datum 73 (deg)", 62756405: "NTF (deg)", 62766405: "NSWC 9Z-2 (deg)", 62776405: "OSGB 1936 (deg)", 62786405: "OSGB70 (deg)", 62796405: "OS(SN)80 (deg)", 62806405: "Padang (deg)", 62816405: "Palestine 1923 (deg)", 62826405: "Pointe Noire (deg)", 62836405: "GDA94 (deg)", 62846405: "Pulkovo 1942 (deg)", 62856405: "Qatar 1974 (deg)", 62866405: "Qatar 1948 (deg)", 62886405: "Loma Quintana (deg)", 62896405: "Amersfoort (deg)", 62926405: "Sapper Hill 1943 (deg)", 62936405: "Schwarzeck (deg)", 62956405: "Serindung (deg)", 62976405: "Tananarive (deg)", 62986405: "Timbalai 1948 (deg)", 62996405: "TM65 (deg)", 63006405: "TM75 (deg)", 63016405: "Tokyo (deg)", 63026405: "Trinidad 1903 (deg)", 63036405: "TC(1948) (deg)", 63046405: "Voirol 1875 (deg)", 63066405: "Bern 1938 (deg)", 63076405: "Nord Sahara 1959 (deg)", 63086405: "RT38 (deg)", 63096405: "Yacare (deg)", 63106405: "Yoff (deg)", 63116405: "Zanderij (deg)", 63126405: "MGI (deg)", 63136405: "Belge 1972 (deg)", 63146405: "DHDN (deg)", 63156405: "Conakry 1905 (deg)", 63166405: "Dealul Piscului 1933 (deg)", 63176405: "Dealul Piscului 1970 (deg)", 63186405: "NGN (deg)", 63196405: "KUDAMS (deg)", 63226405: "WGS 72 (deg)", 63246405: "WGS 72BE (deg)", 63266405: "WGS 84 (deg)", 63266406: "WGS 84 (degH)", 63266407: "WGS 84 (Hdeg)", 63266408: "WGS 84 (DM)", 63266409: "WGS 84 (DMH)", 63266410: "WGS 84 (HDM)", 63266411: "WGS 84 (DMS)", 63266412: "WGS 84 (HDMS)", 66006405: "Anguilla 1957 (deg)", 66016405: "Antigua 1943 (deg)", 66026405: "Dominica 1945 (deg)", 66036405: "Grenada 1953 (deg)", 66046405: "Montserrat 1958 (deg)", 66056405: "St. Kitts 1955 (deg)", 66066405: "St. Lucia 1955 (deg)", 66076405: "St. 
Vincent 1945 (deg)", 66086405: "NAD27(76) (deg)", 66096405: "NAD27(CGQ77) (deg)", 66106405: "Xian 1980 (deg)", 66116405: "Hong Kong 1980 (deg)", 66126405: "JGD2000 (deg)", 66136405: "Segara (deg)", 66146405: "QND95 (deg)", 66156405: "Porto Santo (deg)", 66166405: "Selvagem Grande (deg)", 66186405: "SAD69 (deg)", 66196405: "SWEREF99 (deg)", 66206405: "Point 58 (deg)", 66216405: "Fort Marigot (deg)", 66226405: "Sainte Anne (deg)", 66236405: "CSG67 (deg)", 66246405: "RGFG95 (deg)", 66256405: "Fort Desaix (deg)", 66266405: "Piton des Neiges (deg)", 66276405: "RGR92 (deg)", 66286405: "Tahiti (deg)", 66296405: "Tahaa (deg)", 66306405: "IGN72 Nuku Hiva (deg)", 66316405: "K0 1949 (deg)", 66326405: "Combani 1950 (deg)", 66336405: "IGN56 Lifou (deg)", 66346405: "IGN72 Grande Terre (deg)", 66356405: "ST87 Ouvea (deg)", 66366405: "Petrels 1972 (deg)", 66376405: "Perroud 1950 (deg)", 66386405: "Saint Pierre et Miquelon 1950 (deg)", 66396405: "MOP78 (deg)", 66406405: "RRAF 1991 (deg)", 66416405: "IGN53 Mare (deg)", 66426405: "ST84 Ile des Pins (deg)", 66436405: "ST71 Belep (deg)", 66446405: "NEA74 Noumea (deg)", 66456405: "RGNC 1991 (deg)", 66466405: "Grand Comoros (deg)", 66576405: "Reykjavik 1900 (deg)", 66586405: "Hjorsey 1955 (deg)", 66596405: "ISN93 (deg)", 66606405: "Helle 1954 (deg)", 66616405: "LKS92 (deg)", 66636405: "Porto Santo 1995 (deg)", 66646405: "Azores Oriental 1995 (deg)", 66656405: "Azores Central 1995 (deg)", 66666405: "Lisbon 1890 (deg)", 66676405: "IKBD-92 (deg)", 68016405: "Bern 1898 (Bern) (deg)", 68026405: "Bogota 1975 (Bogota) (deg)", 68036405: "Lisbon (Lisbon) (deg)", 68046405: "Makassar (Jakarta) (deg)", 68056405: "MGI (Ferro) (deg)", 68066405: "Monte Mario (Rome) (deg)", 68086405: "Padang (Jakarta) (deg)", 68096405: "Belge 1950 (Brussels) (deg)", 68136405: "Batavia (Jakarta) (deg)", 68146405: "RT38 (Stockholm) (deg)", 68156405: "Greek (Athens) (deg)", 68186405: "S-JTSK (Ferro) (deg)", 68206405: "Segara (Jakarta) (deg)", 69036405: "Madrid 1870 (Madrid) (deg)" } ProjectedCSTypeGeoKey = { 2000: "Anguilla 1957 / British West Indies Grid", 2001: "Antigua 1943 / British West Indies Grid", 2002: "Dominica 1945 / British West Indies Grid", 2003: "Grenada 1953 / British West Indies Grid", 2004: "Montserrat 1958 / British West Indies Grid", 2005: "St. Kitts 1955 / British West Indies Grid", 2006: "St. Lucia 1955 / British West Indies Grid", 2007: "St. 
ProjectedCSTypeGeoKey = { 2000: "Anguilla 1957 / British West Indies Grid", 2001: "Antigua 1943 / British West Indies Grid", 2002: "Dominica 1945 / British West Indies Grid", 2003: "Grenada 1953 / British West Indies Grid", 2004: "Montserrat 1958 / British West Indies Grid", 2005: "St. Kitts 1955 / British West Indies Grid", 2006: "St. Lucia 1955 / British West Indies Grid", 2007: "St. Vincent 45 / British West Indies Grid", 2008: "NAD27(CGQ77) / SCoPQ zone 2", 2009: "NAD27(CGQ77) / SCoPQ zone 3", 2010: "NAD27(CGQ77) / SCoPQ zone 4", 2011: "NAD27(CGQ77) / SCoPQ zone 5", 2012: "NAD27(CGQ77) / SCoPQ zone 6", 2013: "NAD27(CGQ77) / SCoPQ zone 7", 2014: "NAD27(CGQ77) / SCoPQ zone 8", 2015: "NAD27(CGQ77) / SCoPQ zone 9", 2016: "NAD27(CGQ77) / SCoPQ zone 10", 2017: "NAD27(76) / MTM zone 8", 2018: "NAD27(76) / MTM zone 9", 2019: "NAD27(76) / MTM zone 10", 2020: "NAD27(76) / MTM zone 11", 2021: "NAD27(76) / MTM zone 12", 2022: "NAD27(76) / MTM zone 13", 2023: "NAD27(76) / MTM zone 14", 2024: "NAD27(76) / MTM zone 15", 2025: "NAD27(76) / MTM zone 16", 2026: "NAD27(76) / MTM zone 17", 2027: "NAD27(76) / UTM zone 15N", 2028: "NAD27(76) / UTM zone 16N", 2029: "NAD27(76) / UTM zone 17N", 2030: "NAD27(76) / UTM zone 18N", 2031: "NAD27(CGQ77) / UTM zone 17N", 2032: "NAD27(CGQ77) / UTM zone 18N", 2033: "NAD27(CGQ77) / UTM zone 19N", 2034: "NAD27(CGQ77) / UTM zone 20N", 2035: "NAD27(CGQ77) / UTM zone 21N", 2036: "NAD83(CSRS98) / New Brunswick Stereo", 2037: "NAD83(CSRS98) / UTM zone 19N", 2038: "NAD83(CSRS98) / UTM zone 20N", 2039: "Israel 1993 / Israeli TM Grid", 2040: "Locodjo 1965 / UTM zone 30N", 2041: "Abidjan 1987 / UTM zone 30N", 2042: "Locodjo 1965 / UTM zone 29N", 2043: "Abidjan 1987 / UTM zone 29N", 2044: "Hanoi 1972 / Gauss-Kruger zone 18", 2045: "Hanoi 1972 / Gauss-Kruger zone 19", 2046: "Hartebeesthoek94 / Lo15", 2047: "Hartebeesthoek94 / Lo17", 2048: "Hartebeesthoek94 / Lo19", 2049: "Hartebeesthoek94 / Lo21", 2050: "Hartebeesthoek94 / Lo23", 2051: "Hartebeesthoek94 / Lo25", 2052: "Hartebeesthoek94 / Lo27", 2053: "Hartebeesthoek94 / Lo29", 2054: "Hartebeesthoek94 / Lo31", 2055: "Hartebeesthoek94 / Lo33", 2056: "CH1903+ / LV95", 2057: "Rassadiran / Nakhl e Taqi", 2058: "ED50(ED77) / UTM zone 38N", 2059: "ED50(ED77) / UTM zone 39N", 2060: "ED50(ED77) / UTM zone 40N", 2061: "ED50(ED77) / UTM zone 41N", 2062: "Madrid 1870 (Madrid) / Spain", 2063: "Dabola 1981 / UTM zone 28N", 2064: "Dabola 1981 / UTM zone 29N", 2065: "S-JTSK (Ferro) / Krovak", 2066: "Mount Dillon / Tobago Grid", 2067: "Naparima 1955 / UTM zone 20N", 2068: "ELD79 / Libya zone 5", 2069: "ELD79 / Libya zone 6", 2070: "ELD79 / Libya zone 7", 2071: "ELD79 / Libya zone 8", 2072: "ELD79 / Libya zone 9", 2073: "ELD79 / Libya zone 10", 2074: "ELD79 / Libya zone 11", 2075: "ELD79 / Libya zone 12", 2076: "ELD79 / Libya zone 13", 2077: "ELD79 / UTM zone 32N", 2078: "ELD79 / UTM zone 33N", 2079: "ELD79 / UTM zone 34N", 2080: "ELD79 / UTM zone 35N", 2081: "Chos Malal 1914 / Argentina 2", 2082: "Pampa del Castillo / Argentina 2", 2083: "Hito XVIII 1963 / Argentina 2", 2084: "Hito XVIII 1963 / UTM zone 19S", 2085: "NAD27 / Cuba Norte", 2086: "NAD27 / Cuba Sur", 2087: "ELD79 / TM 12 NE", 2088: "Carthage / TM 11 NE", 2089: "Yemen NGN96 / UTM zone 38N", 2090: "Yemen NGN96 / UTM zone 39N", 2091: "South Yemen / Gauss Kruger zone 8", 2092: "South Yemen / Gauss Kruger zone 9", 2093: "Hanoi 1972 / GK 106 NE", 2094: "WGS 72BE / TM 106 NE", 2095: "Bissau / UTM zone 28N", 2096: "Korean 1985 / East Belt", 2097: "Korean 1985 / Central Belt", 2098: "Korean 1985 / West Belt", 2099: "Qatar 1948 / Qatar Grid", 2100: "GGRS87 / Greek Grid", 2101: "Lake / Maracaibo Grid M1", 2102: "Lake / Maracaibo Grid", 2103: "Lake / Maracaibo Grid M3", 2104: "Lake / Maracaibo La Rosa Grid", 2105: "NZGD2000 / Mount Eden 2000", 2106: "NZGD2000 / Bay of Plenty 2000", 2107: "NZGD2000 / Poverty Bay 2000", 2108: "NZGD2000 / Hawkes Bay 2000", 2109: "NZGD2000 / Taranaki 
2000", 2110: "NZGD2000 / Tuhirangi 2000", 2111: "NZGD2000 / Wanganui 2000", 2112: "NZGD2000 / Wairarapa 2000", 2113: "NZGD2000 / Wellington 2000", 2114: "NZGD2000 / Collingwood 2000", 2115: "NZGD2000 / Nelson 2000", 2116: "NZGD2000 / Karamea 2000", 2117: "NZGD2000 / Buller 2000", 2118: "NZGD2000 / Grey 2000", 2119: "NZGD2000 / Amuri 2000", 2120: "NZGD2000 / Marlborough 2000", 2121: "NZGD2000 / Hokitika 2000", 2122: "NZGD2000 / Okarito 2000", 2123: "NZGD2000 / Jacksons Bay 2000", 2124: "NZGD2000 / Mount Pleasant 2000", 2125: "NZGD2000 / Gawler 2000", 2126: "NZGD2000 / Timaru 2000", 2127: "NZGD2000 / Lindis Peak 2000", 2128: "NZGD2000 / Mount Nicholas 2000", 2129: "NZGD2000 / Mount York 2000", 2130: "NZGD2000 / Observation Point 2000", 2131: "NZGD2000 / North Taieri 2000", 2132: "NZGD2000 / Bluff 2000", 2133: "NZGD2000 / UTM zone 58S", 2134: "NZGD2000 / UTM zone 59S", 2135: "NZGD2000 / UTM zone 60S", 2136: "Accra / Ghana National Grid", 2137: "Accra / TM 1 NW", 2138: "NAD27(CGQ77) / Quebec Lambert", 2139: "NAD83(CSRS98) / SCoPQ zone 2", 2140: "NAD83(CSRS98) / MTM zone 3", 2141: "NAD83(CSRS98) / MTM zone 4", 2142: "NAD83(CSRS98) / MTM zone 5", 2143: "NAD83(CSRS98) / MTM zone 6", 2144: "NAD83(CSRS98) / MTM zone 7", 2145: "NAD83(CSRS98) / MTM zone 8", 2146: "NAD83(CSRS98) / MTM zone 9", 2147: "NAD83(CSRS98) / MTM zone 10", 2148: "NAD83(CSRS98) / UTM zone 21N", 2149: "NAD83(CSRS98) / UTM zone 18N", 2150: "NAD83(CSRS98) / UTM zone 17N", 2151: "NAD83(CSRS98) / UTM zone 13N", 2152: "NAD83(CSRS98) / UTM zone 12N", 2153: "NAD83(CSRS98) / UTM zone 11N", 2154: "RGF93 / Lambert-93", 2155: "American Samoa 1962 / American Samoa Lambert", 2156: "NAD83(HARN) / UTM zone 59S", 2157: "IRENET95 / Irish Transverse Mercator", 2158: "IRENET95 / UTM zone 29N", 2159: "Sierra Leone 1924 / New Colony Grid", 2160: "Sierra Leone 1924 / New War Office Grid", 2161: "Sierra Leone 1968 / UTM zone 28N", 2162: "Sierra Leone 1968 / UTM zone 29N", 2163: "US National Atlas Equal Area", 2164: "Locodjo 1965 / TM 5 NW", 2165: "Abidjan 1987 / TM 5 NW", 2166: "Pulkovo 1942(83) / Gauss Kruger zone 3", 2167: "Pulkovo 1942(83) / Gauss Kruger zone 4", 2168: "Pulkovo 1942(83) / Gauss Kruger zone 5", 2169: "Luxembourg 1930 / Gauss", 2170: "MGI / Slovenia Grid", 2171: "Pulkovo 1942(58) / Poland zone I", 2172: "Pulkovo 1942(58) / Poland zone II", 2173: "Pulkovo 1942(58) / Poland zone III", 2174: "Pulkovo 1942(58) / Poland zone IV", 2175: "Pulkovo 1942(58) / Poland zone V", 2176: "ETRS89 / Poland CS2000 zone 5", 2177: "ETRS89 / Poland CS2000 zone 6", 2178: "ETRS89 / Poland CS2000 zone 7", 2179: "ETRS89 / Poland CS2000 zone 8", 2180: "ETRS89 / Poland CS92", 2188: "Azores Occidental 1939 / UTM zone 25N", 2189: "Azores Central 1948 / UTM zone 26N", 2190: "Azores Oriental 1940 / UTM zone 26N", 2191: "Madeira 1936 / UTM zone 28N", 2192: "ED50 / France EuroLambert", 2193: "NZGD2000 / New Zealand Transverse Mercator 2000", 2194: "American Samoa 1962 / American Samoa Lambert", 2195: "NAD83(HARN) / UTM zone 2S", 2196: "ETRS89 / Kp2000 Jutland", 2197: "ETRS89 / Kp2000 Zealand", 2198: "ETRS89 / Kp2000 Bornholm", 2199: "Albanian 1987 / Gauss Kruger zone 4", 2200: "ATS77 / New Brunswick Stereographic (ATS77)", 2201: "REGVEN / UTM zone 18N", 2202: "REGVEN / UTM zone 19N", 2203: "REGVEN / UTM zone 20N", 2204: "NAD27 / Tennessee", 2205: "NAD83 / Kentucky North", 2206: "ED50 / 3-degree Gauss-Kruger zone 9", 2207: "ED50 / 3-degree Gauss-Kruger zone 10", 2208: "ED50 / 3-degree Gauss-Kruger zone 11", 2209: "ED50 / 3-degree Gauss-Kruger zone 12", 2210: "ED50 / 
3-degree Gauss-Kruger zone 13", 2211: "ED50 / 3-degree Gauss-Kruger zone 14", 2212: "ED50 / 3-degree Gauss-Kruger zone 15", 2213: "ETRS89 / TM 30 NE", 2214: "Douala 1948 / AOF west", 2215: "Manoca 1962 / UTM zone 32N", 2216: "Qornoq 1927 / UTM zone 22N", 2217: "Qornoq 1927 / UTM zone 23N", 2218: "Scoresbysund 1952 / Greenland zone 5 east", 2219: "ATS77 / UTM zone 19N", 2220: "ATS77 / UTM zone 20N", 2221: "Scoresbysund 1952 / Greenland zone 6 east", 2222: "NAD83 / Arizona East (ft)", 2223: "NAD83 / Arizona Central (ft)", 2224: "NAD83 / Arizona West (ft)", 2225: "NAD83 / California zone 1 (ftUS)", 2226: "NAD83 / California zone 2 (ftUS)", 2227: "NAD83 / California zone 3 (ftUS)", 2228: "NAD83 / California zone 4 (ftUS)", 2229: "NAD83 / California zone 5 (ftUS)", 2230: "NAD83 / California zone 6 (ftUS)", 2231: "NAD83 / Colorado North (ftUS)", 2232: "NAD83 / Colorado Central (ftUS)", 2233: "NAD83 / Colorado South (ftUS)", 2234: "NAD83 / Connecticut (ftUS)", 2235: "NAD83 / Delaware (ftUS)", 2236: "NAD83 / Florida East (ftUS)", 2237: "NAD83 / Florida West (ftUS)", 2238: "NAD83 / Florida North (ftUS)", 2239: "NAD83 / Georgia East (ftUS)", 2240: "NAD83 / Georgia West (ftUS)", 2241: "NAD83 / Idaho East (ftUS)", 2242: "NAD83 / Idaho Central (ftUS)", 2243: "NAD83 / Idaho West (ftUS)", 2244: "NAD83 / Indiana East (ftUS)", 2245: "NAD83 / Indiana West (ftUS)", 2246: "NAD83 / Kentucky North (ftUS)", 2247: "NAD83 / Kentucky South (ftUS)", 2248: "NAD83 / Maryland (ftUS)", 2249: "NAD83 / Massachusetts Mainland (ftUS)", 2250: "NAD83 / Massachusetts Island (ftUS)", 2251: "NAD83 / Michigan North (ft)", 2252: "NAD83 / Michigan Central (ft)", 2253: "NAD83 / Michigan South (ft)", 2254: "NAD83 / Mississippi East (ftUS)", 2255: "NAD83 / Mississippi West (ftUS)", 2256: "NAD83 / Montana (ft)", 2257: "NAD83 / New Mexico East (ftUS)", 2258: "NAD83 / New Mexico Central (ftUS)", 2259: "NAD83 / New Mexico West (ftUS)", 2260: "NAD83 / New York East (ftUS)", 2261: "NAD83 / New York Central (ftUS)", 2262: "NAD83 / New York West (ftUS)", 2263: "NAD83 / New York Long Island (ftUS)", 2264: "NAD83 / North Carolina (ftUS)", 2265: "NAD83 / North Dakota North (ft)", 2266: "NAD83 / North Dakota South (ft)", 2267: "NAD83 / Oklahoma North (ftUS)", 2268: "NAD83 / Oklahoma South (ftUS)", 2269: "NAD83 / Oregon North (ft)", 2270: "NAD83 / Oregon South (ft)", 2271: "NAD83 / Pennsylvania North (ftUS)", 2272: "NAD83 / Pennsylvania South (ftUS)", 2273: "NAD83 / South Carolina (ft)", 2274: "NAD83 / Tennessee (ftUS)", 2275: "NAD83 / Texas North (ftUS)", 2276: "NAD83 / Texas North Central (ftUS)", 2277: "NAD83 / Texas Central (ftUS)", 2278: "NAD83 / Texas South Central (ftUS)", 2279: "NAD83 / Texas South (ftUS)", 2280: "NAD83 / Utah North (ft)", 2281: "NAD83 / Utah Central (ft)", 2282: "NAD83 / Utah South (ft)", 2283: "NAD83 / Virginia North (ftUS)", 2284: "NAD83 / Virginia South (ftUS)", 2285: "NAD83 / Washington North (ftUS)", 2286: "NAD83 / Washington South (ftUS)", 2287: "NAD83 / Wisconsin North (ftUS)", 2288: "NAD83 / Wisconsin Central (ftUS)", 2289: "NAD83 / Wisconsin South (ftUS)", 2290: "ATS77 / Prince Edward Isl. Stereographic (ATS77)", 2291: "NAD83(CSRS98) / Prince Edward Isl. Stereographic (NAD83)", 2292: "NAD83(CSRS98) / Prince Edward Isl. 
Stereographic (NAD83)", 2294: "ATS77 / MTM Nova Scotia zone 4", 2295: "ATS77 / MTM Nova Scotia zone 5", 2296: "Ammassalik 1958 / Greenland zone 7 east", 2297: "Qornoq 1927 / Greenland zone 1 east", 2298: "Qornoq 1927 / Greenland zone 2 east", 2299: "Qornoq 1927 / Greenland zone 2 west", 2300: "Qornoq 1927 / Greenland zone 3 east", 2301: "Qornoq 1927 / Greenland zone 3 west", 2302: "Qornoq 1927 / Greenland zone 4 east", 2303: "Qornoq 1927 / Greenland zone 4 west", 2304: "Qornoq 1927 / Greenland zone 5 west", 2305: "Qornoq 1927 / Greenland zone 6 west", 2306: "Qornoq 1927 / Greenland zone 7 west", 2307: "Qornoq 1927 / Greenland zone 8 east", 2308: "Batavia / TM 109 SE", 2309: "WGS 84 / TM 116 SE", 2310: "WGS 84 / TM 132 SE", 2311: "WGS 84 / TM 6 NE", 2312: "Garoua / UTM zone 33N", 2313: "Kousseri / UTM zone 33N", 2314: "Trinidad 1903 / Trinidad Grid (ftCla)", 2315: "Campo Inchauspe / UTM zone 19S", 2316: "Campo Inchauspe / UTM zone 20S", 2317: "PSAD56 / ICN Regional", 2318: "Ain el Abd / Aramco Lambert", 2319: "ED50 / TM27", 2320: "ED50 / TM30", 2321: "ED50 / TM33", 2322: "ED50 / TM36", 2323: "ED50 / TM39", 2324: "ED50 / TM42", 2325: "ED50 / TM45", 2326: "Hong Kong 1980 Grid System", 2327: "Xian 1980 / Gauss-Kruger zone 13", 2328: "Xian 1980 / Gauss-Kruger zone 14", 2329: "Xian 1980 / Gauss-Kruger zone 15", 2330: "Xian 1980 / Gauss-Kruger zone 16", 2331: "Xian 1980 / Gauss-Kruger zone 17", 2332: "Xian 1980 / Gauss-Kruger zone 18", 2333: "Xian 1980 / Gauss-Kruger zone 19", 2334: "Xian 1980 / Gauss-Kruger zone 20", 2335: "Xian 1980 / Gauss-Kruger zone 21", 2336: "Xian 1980 / Gauss-Kruger zone 22", 2337: "Xian 1980 / Gauss-Kruger zone 23", 2338: "Xian 1980 / Gauss-Kruger CM 75E", 2339: "Xian 1980 / Gauss-Kruger CM 81E", 2340: "Xian 1980 / Gauss-Kruger CM 87E", 2341: "Xian 1980 / Gauss-Kruger CM 93E", 2342: "Xian 1980 / Gauss-Kruger CM 99E", 2343: "Xian 1980 / Gauss-Kruger CM 105E", 2344: "Xian 1980 / Gauss-Kruger CM 111E", 2345: "Xian 1980 / Gauss-Kruger CM 117E", 2346: "Xian 1980 / Gauss-Kruger CM 123E", 2347: "Xian 1980 / Gauss-Kruger CM 129E", 2348: "Xian 1980 / Gauss-Kruger CM 135E", 2349: "Xian 1980 / 3-degree Gauss-Kruger zone 25", 2350: "Xian 1980 / 3-degree Gauss-Kruger zone 26", 2351: "Xian 1980 / 3-degree Gauss-Kruger zone 27", 2352: "Xian 1980 / 3-degree Gauss-Kruger zone 28", 2353: "Xian 1980 / 3-degree Gauss-Kruger zone 29", 2354: "Xian 1980 / 3-degree Gauss-Kruger zone 30", 2355: "Xian 1980 / 3-degree Gauss-Kruger zone 31", 2356: "Xian 1980 / 3-degree Gauss-Kruger zone 32", 2357: "Xian 1980 / 3-degree Gauss-Kruger zone 33", 2358: "Xian 1980 / 3-degree Gauss-Kruger zone 34", 2359: "Xian 1980 / 3-degree Gauss-Kruger zone 35", 2360: "Xian 1980 / 3-degree Gauss-Kruger zone 36", 2361: "Xian 1980 / 3-degree Gauss-Kruger zone 37", 2362: "Xian 1980 / 3-degree Gauss-Kruger zone 38", 2363: "Xian 1980 / 3-degree Gauss-Kruger zone 39", 2364: "Xian 1980 / 3-degree Gauss-Kruger zone 40", 2365: "Xian 1980 / 3-degree Gauss-Kruger zone 41", 2366: "Xian 1980 / 3-degree Gauss-Kruger zone 42", 2367: "Xian 1980 / 3-degree Gauss-Kruger zone 43", 2368: "Xian 1980 / 3-degree Gauss-Kruger zone 44", 2369: "Xian 1980 / 3-degree Gauss-Kruger zone 45", 2370: "Xian 1980 / 3-degree Gauss-Kruger CM 75E", 2371: "Xian 1980 / 3-degree Gauss-Kruger CM 78E", 2372: "Xian 1980 / 3-degree Gauss-Kruger CM 81E", 2373: "Xian 1980 / 3-degree Gauss-Kruger CM 84E", 2374: "Xian 1980 / 3-degree Gauss-Kruger CM 87E", 2375: "Xian 1980 / 3-degree Gauss-Kruger CM 90E", 2376: "Xian 1980 / 3-degree Gauss-Kruger CM 93E", 2377: "Xian 
1980 / 3-degree Gauss-Kruger CM 96E", 2378: "Xian 1980 / 3-degree Gauss-Kruger CM 99E", 2379: "Xian 1980 / 3-degree Gauss-Kruger CM 102E", 2380: "Xian 1980 / 3-degree Gauss-Kruger CM 105E", 2381: "Xian 1980 / 3-degree Gauss-Kruger CM 108E", 2382: "Xian 1980 / 3-degree Gauss-Kruger CM 111E", 2383: "Xian 1980 / 3-degree Gauss-Kruger CM 114E", 2384: "Xian 1980 / 3-degree Gauss-Kruger CM 117E", 2385: "Xian 1980 / 3-degree Gauss-Kruger CM 120E", 2386: "Xian 1980 / 3-degree Gauss-Kruger CM 123E", 2387: "Xian 1980 / 3-degree Gauss-Kruger CM 126E", 2388: "Xian 1980 / 3-degree Gauss-Kruger CM 129E", 2389: "Xian 1980 / 3-degree Gauss-Kruger CM 132E", 2390: "Xian 1980 / 3-degree Gauss-Kruger CM 135E", 2391: "KKJ / Finland zone 1", 2392: "KKJ / Finland zone 2", 2393: "KKJ / Finland Uniform Coordinate System", 2394: "KKJ / Finland zone 4", 2395: "South Yemen / Gauss-Kruger zone 8", 2396: "South Yemen / Gauss-Kruger zone 9", 2397: "Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 3", 2398: "Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 4", 2399: "Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 5", 2400: "RT90 2.5 gon W", 2401: "Beijing 1954 / 3-degree Gauss-Kruger zone 25", 2402: "Beijing 1954 / 3-degree Gauss-Kruger zone 26", 2403: "Beijing 1954 / 3-degree Gauss-Kruger zone 27", 2404: "Beijing 1954 / 3-degree Gauss-Kruger zone 28", 2405: "Beijing 1954 / 3-degree Gauss-Kruger zone 29", 2406: "Beijing 1954 / 3-degree Gauss-Kruger zone 30", 2407: "Beijing 1954 / 3-degree Gauss-Kruger zone 31", 2408: "Beijing 1954 / 3-degree Gauss-Kruger zone 32", 2409: "Beijing 1954 / 3-degree Gauss-Kruger zone 33", 2410: "Beijing 1954 / 3-degree Gauss-Kruger zone 34", 2411: "Beijing 1954 / 3-degree Gauss-Kruger zone 35", 2412: "Beijing 1954 / 3-degree Gauss-Kruger zone 36", 2413: "Beijing 1954 / 3-degree Gauss-Kruger zone 37", 2414: "Beijing 1954 / 3-degree Gauss-Kruger zone 38", 2415: "Beijing 1954 / 3-degree Gauss-Kruger zone 39", 2416: "Beijing 1954 / 3-degree Gauss-Kruger zone 40", 2417: "Beijing 1954 / 3-degree Gauss-Kruger zone 41", 2418: "Beijing 1954 / 3-degree Gauss-Kruger zone 42", 2419: "Beijing 1954 / 3-degree Gauss-Kruger zone 43", 2420: "Beijing 1954 / 3-degree Gauss-Kruger zone 44", 2421: "Beijing 1954 / 3-degree Gauss-Kruger zone 45", 2422: "Beijing 1954 / 3-degree Gauss-Kruger CM 75E", 2423: "Beijing 1954 / 3-degree Gauss-Kruger CM 78E", 2424: "Beijing 1954 / 3-degree Gauss-Kruger CM 81E", 2425: "Beijing 1954 / 3-degree Gauss-Kruger CM 84E", 2426: "Beijing 1954 / 3-degree Gauss-Kruger CM 87E", 2427: "Beijing 1954 / 3-degree Gauss-Kruger CM 90E", 2428: "Beijing 1954 / 3-degree Gauss-Kruger CM 93E", 2429: "Beijing 1954 / 3-degree Gauss-Kruger CM 96E", 2430: "Beijing 1954 / 3-degree Gauss-Kruger CM 99E", 2431: "Beijing 1954 / 3-degree Gauss-Kruger CM 102E", 2432: "Beijing 1954 / 3-degree Gauss-Kruger CM 105E", 2433: "Beijing 1954 / 3-degree Gauss-Kruger CM 108E", 2434: "Beijing 1954 / 3-degree Gauss-Kruger CM 111E", 2435: "Beijing 1954 / 3-degree Gauss-Kruger CM 114E", 2436: "Beijing 1954 / 3-degree Gauss-Kruger CM 117E", 2437: "Beijing 1954 / 3-degree Gauss-Kruger CM 120E", 2438: "Beijing 1954 / 3-degree Gauss-Kruger CM 123E", 2439: "Beijing 1954 / 3-degree Gauss-Kruger CM 126E", 2440: "Beijing 1954 / 3-degree Gauss-Kruger CM 129E", 2441: "Beijing 1954 / 3-degree Gauss-Kruger CM 132E", 2442: "Beijing 1954 / 3-degree Gauss-Kruger CM 135E", 2443: "JGD2000 / Japan Plane Rectangular CS I", 2444: "JGD2000 / Japan Plane Rectangular CS II", 2445: "JGD2000 / Japan Plane Rectangular CS III", 2446: "JGD2000 / Japan Plane 
Rectangular CS IV", 2447: "JGD2000 / Japan Plane Rectangular CS V", 2448: "JGD2000 / Japan Plane Rectangular CS VI", 2449: "JGD2000 / Japan Plane Rectangular CS VII", 2450: "JGD2000 / Japan Plane Rectangular CS VIII", 2451: "JGD2000 / Japan Plane Rectangular CS IX", 2452: "JGD2000 / Japan Plane Rectangular CS X", 2453: "JGD2000 / Japan Plane Rectangular CS XI", 2454: "JGD2000 / Japan Plane Rectangular CS XII", 2455: "JGD2000 / Japan Plane Rectangular CS XIII", 2456: "JGD2000 / Japan Plane Rectangular CS XIV", 2457: "JGD2000 / Japan Plane Rectangular CS XV", 2458: "JGD2000 / Japan Plane Rectangular CS XVI", 2459: "JGD2000 / Japan Plane Rectangular CS XVII", 2460: "JGD2000 / Japan Plane Rectangular CS XVIII", 2461: "JGD2000 / Japan Plane Rectangular CS XIX", 2462: "Albanian 1987 / Gauss-Kruger zone 4", 2463: "Pulkovo 1995 / Gauss-Kruger CM 21E", 2464: "Pulkovo 1995 / Gauss-Kruger CM 27E", 2465: "Pulkovo 1995 / Gauss-Kruger CM 33E", 2466: "Pulkovo 1995 / Gauss-Kruger CM 39E", 2467: "Pulkovo 1995 / Gauss-Kruger CM 45E", 2468: "Pulkovo 1995 / Gauss-Kruger CM 51E", 2469: "Pulkovo 1995 / Gauss-Kruger CM 57E", 2470: "Pulkovo 1995 / Gauss-Kruger CM 63E", 2471: "Pulkovo 1995 / Gauss-Kruger CM 69E", 2472: "Pulkovo 1995 / Gauss-Kruger CM 75E", 2473: "Pulkovo 1995 / Gauss-Kruger CM 81E", 2474: "Pulkovo 1995 / Gauss-Kruger CM 87E", 2475: "Pulkovo 1995 / Gauss-Kruger CM 93E", 2476: "Pulkovo 1995 / Gauss-Kruger CM 99E", 2477: "Pulkovo 1995 / Gauss-Kruger CM 105E", 2478: "Pulkovo 1995 / Gauss-Kruger CM 111E", 2479: "Pulkovo 1995 / Gauss-Kruger CM 117E", 2480: "Pulkovo 1995 / Gauss-Kruger CM 123E", 2481: "Pulkovo 1995 / Gauss-Kruger CM 129E", 2482: "Pulkovo 1995 / Gauss-Kruger CM 135E", 2483: "Pulkovo 1995 / Gauss-Kruger CM 141E", 2484: "Pulkovo 1995 / Gauss-Kruger CM 147E", 2485: "Pulkovo 1995 / Gauss-Kruger CM 153E", 2486: "Pulkovo 1995 / Gauss-Kruger CM 159E", 2487: "Pulkovo 1995 / Gauss-Kruger CM 165E", 2488: "Pulkovo 1995 / Gauss-Kruger CM 171E", 2489: "Pulkovo 1995 / Gauss-Kruger CM 177E", 2490: "Pulkovo 1995 / Gauss-Kruger CM 177W", 2491: "Pulkovo 1995 / Gauss-Kruger CM 171W", 2492: "Pulkovo 1942 / Gauss-Kruger CM 9E", 2493: "Pulkovo 1942 / Gauss-Kruger CM 15E", 2494: "Pulkovo 1942 / Gauss-Kruger CM 21E", 2495: "Pulkovo 1942 / Gauss-Kruger CM 27E", 2496: "Pulkovo 1942 / Gauss-Kruger CM 33E", 2497: "Pulkovo 1942 / Gauss-Kruger CM 39E", 2498: "Pulkovo 1942 / Gauss-Kruger CM 45E", 2499: "Pulkovo 1942 / Gauss-Kruger CM 51E", 2500: "Pulkovo 1942 / Gauss-Kruger CM 57E", 2501: "Pulkovo 1942 / Gauss-Kruger CM 63E", 2502: "Pulkovo 1942 / Gauss-Kruger CM 69E", 2503: "Pulkovo 1942 / Gauss-Kruger CM 75E", 2504: "Pulkovo 1942 / Gauss-Kruger CM 81E", 2505: "Pulkovo 1942 / Gauss-Kruger CM 87E", 2506: "Pulkovo 1942 / Gauss-Kruger CM 93E", 2507: "Pulkovo 1942 / Gauss-Kruger CM 99E", 2508: "Pulkovo 1942 / Gauss-Kruger CM 105E", 2509: "Pulkovo 1942 / Gauss-Kruger CM 111E", 2510: "Pulkovo 1942 / Gauss-Kruger CM 117E", 2511: "Pulkovo 1942 / Gauss-Kruger CM 123E", 2512: "Pulkovo 1942 / Gauss-Kruger CM 129E", 2513: "Pulkovo 1942 / Gauss-Kruger CM 135E", 2514: "Pulkovo 1942 / Gauss-Kruger CM 141E", 2515: "Pulkovo 1942 / Gauss-Kruger CM 147E", 2516: "Pulkovo 1942 / Gauss-Kruger CM 153E", 2517: "Pulkovo 1942 / Gauss-Kruger CM 159E", 2518: "Pulkovo 1942 / Gauss-Kruger CM 165E", 2519: "Pulkovo 1942 / Gauss-Kruger CM 171E", 2520: "Pulkovo 1942 / Gauss-Kruger CM 177E", 2521: "Pulkovo 1942 / Gauss-Kruger CM 177W", 2522: "Pulkovo 1942 / Gauss-Kruger CM 171W", 2523: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 7", 2524: "Pulkovo 1942 
/ 3-degree Gauss-Kruger zone 8", 2525: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 9", 2526: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 10", 2527: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 11", 2528: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 12", 2529: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 13", 2530: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 14", 2531: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 15", 2532: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 16", 2533: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 17", 2534: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 18", 2535: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 19", 2536: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 20", 2537: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 21", 2538: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 22", 2539: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 23", 2540: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 24", 2541: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 25", 2542: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 26", 2543: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 27", 2544: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 28", 2545: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 29", 2546: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 30", 2547: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 31", 2548: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 32", 2549: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 33", 2550: "Samboja / UTM zone 50S", 2551: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 34", 2552: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 35", 2553: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 36", 2554: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 37", 2555: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 38", 2556: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 39", 2557: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 40", 2558: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 41", 2559: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 42", 2560: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 43", 2561: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 44", 2562: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 45", 2563: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 46", 2564: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 47", 2565: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 48", 2566: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 49", 2567: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 50", 2568: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 51", 2569: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 52", 2570: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 53", 2571: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 54", 2572: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 55", 2573: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 56", 2574: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 57", 2575: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 58", 2576: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 59", 2577: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 60", 2578: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 61", 2579: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 62", 2580: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 63", 2581: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 64", 2582: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 21E", 2583: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 24E", 2584: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 27E", 2585: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 30E", 2586: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 33E", 2587: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 36E", 2588: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 39E", 2589: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 42E", 2590: "Pulkovo 1942 / 3-degree 
Gauss-Kruger CM 45E", 2591: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 48E", 2592: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 51E", 2593: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 54E", 2594: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 57E", 2595: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 60E", 2596: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 63E", 2597: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 66E", 2598: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 69E", 2599: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 72E", 2600: "Lietuvos Koordinoei Sistema 1994", 2601: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 75E", 2602: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 78E", 2603: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 81E", 2604: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 84E", 2605: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 87E", 2606: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 90E", 2607: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 93E", 2608: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 96E", 2609: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 99E", 2610: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 102E", 2611: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 105E", 2612: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 108E", 2613: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 111E", 2614: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 114E", 2615: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 117E", 2616: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 120E", 2617: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 123E", 2618: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 126E", 2619: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 129E", 2620: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 132E", 2621: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 135E", 2622: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 138E", 2623: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 141E", 2624: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 144E", 2625: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 147E", 2626: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 150E", 2627: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 153E", 2628: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 156E", 2629: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 159E", 2630: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 162E", 2631: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 165E", 2632: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 168E", 2633: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 171E", 2634: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 174E", 2635: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 177E", 2636: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 180E", 2637: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 177W", 2638: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 174W", 2639: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 171W", 2640: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 168W", 2641: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 7", 2642: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 8", 2643: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 9", 2644: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 10", 2645: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 11", 2646: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 12", 2647: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 13", 2648: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 14", 2649: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 15", 2650: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 16", 2651: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 17", 2652: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 18", 2653: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 19", 2654: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 20", 2655: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 21", 2656: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 22", 
2657: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 23", 2658: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 24", 2659: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 25", 2660: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 26", 2661: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 27", 2662: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 28", 2663: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 29", 2664: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 30", 2665: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 31", 2666: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 32", 2667: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 33", 2668: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 34", 2669: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 35", 2670: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 36", 2671: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 37", 2672: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 38", 2673: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 39", 2674: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 40", 2675: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 41", 2676: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 42", 2677: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 43", 2678: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 44", 2679: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 45", 2680: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 46", 2681: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 47", 2682: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 48", 2683: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 49", 2684: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 50", 2685: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 51", 2686: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 52", 2687: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 53", 2688: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 54", 2689: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 55", 2690: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 56", 2691: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 57", 2692: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 58", 2693: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 59", 2694: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 60", 2695: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 61", 2696: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 62", 2697: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 63", 2698: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 64", 2699: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 21E", 2700: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 24E", 2701: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 27E", 2702: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 30E", 2703: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 33E", 2704: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 36E", 2705: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 39E", 2706: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 42E", 2707: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 45E", 2708: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 48E", 2709: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 51E", 2710: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 54E", 2711: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 57E", 2712: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 60E", 2713: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 63E", 2714: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 66E", 2715: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 69E", 2716: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 72E", 2717: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 75E", 2718: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 78E", 2719: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 81E", 2720: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 84E", 2721: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 87E", 2722: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 90E", 2723: "Pulkovo 
1995 / 3-degree Gauss-Kruger CM 93E", 2724: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 96E", 2725: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 99E", 2726: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 102E", 2727: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 105E", 2728: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 108E", 2729: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 111E", 2730: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 114E", 2731: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 117E", 2732: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 120E", 2733: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 123E", 2734: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 126E", 2735: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 129E", 2736: "Tete / UTM zone 36S", 2737: "Tete / UTM zone 37S", 2738: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 132E", 2739: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 135E", 2740: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 138E", 2741: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 141E", 2742: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 144E", 2743: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 147E", 2744: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 150E", 2745: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 153E", 2746: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 156E", 2747: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 159E", 2748: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 162E", 2749: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 165E", 2750: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 168E", 2751: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 171E", 2752: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 174E", 2753: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 177E", 2754: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 180E", 2755: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 177W", 2756: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 174W", 2757: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 171W", 2758: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 168W", 2759: "NAD83(HARN) / Alabama East", 2760: "NAD83(HARN) / Alabama West", 2761: "NAD83(HARN) / Arizona East", 2762: "NAD83(HARN) / Arizona Central", 2763: "NAD83(HARN) / Arizona West", 2764: "NAD83(HARN) / Arkansas North", 2765: "NAD83(HARN) / Arkansas South", 2766: "NAD83(HARN) / California zone 1", 2767: "NAD83(HARN) / California zone 2", 2768: "NAD83(HARN) / California zone 3", 2769: "NAD83(HARN) / California zone 4", 2770: "NAD83(HARN) / California zone 5", 2771: "NAD83(HARN) / California zone 6", 2772: "NAD83(HARN) / Colorado North", 2773: "NAD83(HARN) / Colorado Central", 2774: "NAD83(HARN) / Colorado South", 2775: "NAD83(HARN) / Connecticut", 2776: "NAD83(HARN) / Delaware", 2777: "NAD83(HARN) / Florida East", 2778: "NAD83(HARN) / Florida West", 2779: "NAD83(HARN) / Florida North", 2780: "NAD83(HARN) / Georgia East", 2781: "NAD83(HARN) / Georgia West", 2782: "NAD83(HARN) / Hawaii zone 1", 2783: "NAD83(HARN) / Hawaii zone 2", 2784: "NAD83(HARN) / Hawaii zone 3", 2785: "NAD83(HARN) / Hawaii zone 4", 2786: "NAD83(HARN) / Hawaii zone 5", 2787: "NAD83(HARN) / Idaho East", 2788: "NAD83(HARN) / Idaho Central", 2789: "NAD83(HARN) / Idaho West", 2790: "NAD83(HARN) / Illinois East", 2791: "NAD83(HARN) / Illinois West", 2792: "NAD83(HARN) / Indiana East", 2793: "NAD83(HARN) / Indiana West", 2794: "NAD83(HARN) / Iowa North", 2795: "NAD83(HARN) / Iowa South", 2796: "NAD83(HARN) / Kansas North", 2797: "NAD83(HARN) / Kansas South", 2798: "NAD83(HARN) / Kentucky North", 2799: "NAD83(HARN) / Kentucky South", 2800: "NAD83(HARN) / Louisiana North", 2801: "NAD83(HARN) / Louisiana South", 2802: "NAD83(HARN) / Maine East", 2803: "NAD83(HARN) / Maine West", 2804: 
"NAD83(HARN) / Maryland", 2805: "NAD83(HARN) / Massachusetts Mainland", 2806: "NAD83(HARN) / Massachusetts Island", 2807: "NAD83(HARN) / Michigan North", 2808: "NAD83(HARN) / Michigan Central", 2809: "NAD83(HARN) / Michigan South", 2810: "NAD83(HARN) / Minnesota North", 2811: "NAD83(HARN) / Minnesota Central", 2812: "NAD83(HARN) / Minnesota South", 2813: "NAD83(HARN) / Mississippi East", 2814: "NAD83(HARN) / Mississippi West", 2815: "NAD83(HARN) / Missouri East", 2816: "NAD83(HARN) / Missouri Central", 2817: "NAD83(HARN) / Missouri West", 2818: "NAD83(HARN) / Montana", 2819: "NAD83(HARN) / Nebraska", 2820: "NAD83(HARN) / Nevada East", 2821: "NAD83(HARN) / Nevada Central", 2822: "NAD83(HARN) / Nevada West", 2823: "NAD83(HARN) / New Hampshire", 2824: "NAD83(HARN) / New Jersey", 2825: "NAD83(HARN) / New Mexico East", 2826: "NAD83(HARN) / New Mexico Central", 2827: "NAD83(HARN) / New Mexico West", 2828: "NAD83(HARN) / New York East", 2829: "NAD83(HARN) / New York Central", 2830: "NAD83(HARN) / New York West", 2831: "NAD83(HARN) / New York Long Island", 2832: "NAD83(HARN) / North Dakota North", 2833: "NAD83(HARN) / North Dakota South", 2834: "NAD83(HARN) / Ohio North", 2835: "NAD83(HARN) / Ohio South", 2836: "NAD83(HARN) / Oklahoma North", 2837: "NAD83(HARN) / Oklahoma South", 2838: "NAD83(HARN) / Oregon North", 2839: "NAD83(HARN) / Oregon South", 2840: "NAD83(HARN) / Rhode Island", 2841: "NAD83(HARN) / South Dakota North", 2842: "NAD83(HARN) / South Dakota South", 2843: "NAD83(HARN) / Tennessee", 2844: "NAD83(HARN) / Texas North", 2845: "NAD83(HARN) / Texas North Central", 2846: "NAD83(HARN) / Texas Central", 2847: "NAD83(HARN) / Texas South Central", 2848: "NAD83(HARN) / Texas South", 2849: "NAD83(HARN) / Utah North", 2850: "NAD83(HARN) / Utah Central", 2851: "NAD83(HARN) / Utah South", 2852: "NAD83(HARN) / Vermont", 2853: "NAD83(HARN) / Virginia North", 2854: "NAD83(HARN) / Virginia South", 2855: "NAD83(HARN) / Washington North", 2856: "NAD83(HARN) / Washington South", 2857: "NAD83(HARN) / West Virginia North", 2858: "NAD83(HARN) / West Virginia South", 2859: "NAD83(HARN) / Wisconsin North", 2860: "NAD83(HARN) / Wisconsin Central", 2861: "NAD83(HARN) / Wisconsin South", 2862: "NAD83(HARN) / Wyoming East", 2863: "NAD83(HARN) / Wyoming East Central", 2864: "NAD83(HARN) / Wyoming West Central", 2865: "NAD83(HARN) / Wyoming West", 2866: "NAD83(HARN) / Puerto Rico and Virgin Is.", 2867: "NAD83(HARN) / Arizona East (ft)", 2868: "NAD83(HARN) / Arizona Central (ft)", 2869: "NAD83(HARN) / Arizona West (ft)", 2870: "NAD83(HARN) / California zone 1 (ftUS)", 2871: "NAD83(HARN) / California zone 2 (ftUS)", 2872: "NAD83(HARN) / California zone 3 (ftUS)", 2873: "NAD83(HARN) / California zone 4 (ftUS)", 2874: "NAD83(HARN) / California zone 5 (ftUS)", 2875: "NAD83(HARN) / California zone 6 (ftUS)", 2876: "NAD83(HARN) / Colorado North (ftUS)", 2877: "NAD83(HARN) / Colorado Central (ftUS)", 2878: "NAD83(HARN) / Colorado South (ftUS)", 2879: "NAD83(HARN) / Connecticut (ftUS)", 2880: "NAD83(HARN) / Delaware (ftUS)", 2881: "NAD83(HARN) / Florida East (ftUS)", 2882: "NAD83(HARN) / Florida West (ftUS)", 2883: "NAD83(HARN) / Florida North (ftUS)", 2884: "NAD83(HARN) / Georgia East (ftUS)", 2885: "NAD83(HARN) / Georgia West (ftUS)", 2886: "NAD83(HARN) / Idaho East (ftUS)", 2887: "NAD83(HARN) / Idaho Central (ftUS)", 2888: "NAD83(HARN) / Idaho West (ftUS)", 2889: "NAD83(HARN) / Indiana East (ftUS)", 2890: "NAD83(HARN) / Indiana West (ftUS)", 2891: "NAD83(HARN) / Kentucky North (ftUS)", 2892: "NAD83(HARN) / Kentucky 
South (ftUS)", 2893: "NAD83(HARN) / Maryland (ftUS)", 2894: "NAD83(HARN) / Massachusetts Mainland (ftUS)", 2895: "NAD83(HARN) / Massachusetts Island (ftUS)", 2896: "NAD83(HARN) / Michigan North (ft)", 2897: "NAD83(HARN) / Michigan Central (ft)", 2898: "NAD83(HARN) / Michigan South (ft)", 2899: "NAD83(HARN) / Mississippi East (ftUS)", 2900: "NAD83(HARN) / Mississippi West (ftUS)", 2901: "NAD83(HARN) / Montana (ft)", 2902: "NAD83(HARN) / New Mexico East (ftUS)", 2903: "NAD83(HARN) / New Mexico Central (ftUS)", 2904: "NAD83(HARN) / New Mexico West (ftUS)", 2905: "NAD83(HARN) / New York East (ftUS)", 2906: "NAD83(HARN) / New York Central (ftUS)", 2907: "NAD83(HARN) / New York West (ftUS)", 2908: "NAD83(HARN) / New York Long Island (ftUS)", 2909: "NAD83(HARN) / North Dakota North (ft)", 2910: "NAD83(HARN) / North Dakota South (ft)", 2911: "NAD83(HARN) / Oklahoma North (ftUS)", 2912: "NAD83(HARN) / Oklahoma South (ftUS)", 2913: "NAD83(HARN) / Oregon North (ft)", 2914: "NAD83(HARN) / Oregon South (ft)", 2915: "NAD83(HARN) / Tennessee (ftUS)", 2916: "NAD83(HARN) / Texas North (ftUS)", 2917: "NAD83(HARN) / Texas North Central (ftUS)", 2918: "NAD83(HARN) / Texas Central (ftUS)", 2919: "NAD83(HARN) / Texas South Central (ftUS)", 2920: "NAD83(HARN) / Texas South (ftUS)", 2921: "NAD83(HARN) / Utah North (ft)", 2922: "NAD83(HARN) / Utah Central (ft)", 2923: "NAD83(HARN) / Utah South (ft)", 2924: "NAD83(HARN) / Virginia North (ftUS)", 2925: "NAD83(HARN) / Virginia South (ftUS)", 2926: "NAD83(HARN) / Washington North (ftUS)", 2927: "NAD83(HARN) / Washington South (ftUS)", 2928: "NAD83(HARN) / Wisconsin North (ftUS)", 2929: "NAD83(HARN) / Wisconsin Central (ftUS)", 2930: "NAD83(HARN) / Wisconsin South (ftUS)", 2931: "Beduaram / TM 13 NE", 2932: "QND95 / Qatar National Grid", 2933: "Segara / UTM zone 50S", 2934: "Segara (Jakarta) / NEIEZ", 2935: "Pulkovo 1942 / CS63 zone A1", 2936: "Pulkovo 1942 / CS63 zone A2", 2937: "Pulkovo 1942 / CS63 zone A3", 2938: "Pulkovo 1942 / CS63 zone A4", 2939: "Pulkovo 1942 / CS63 zone K2", 2940: "Pulkovo 1942 / CS63 zone K3", 2941: "Pulkovo 1942 / CS63 zone K4", 2942: "Porto Santo / UTM zone 28N", 2943: "Selvagem Grande / UTM zone 28N", 2944: "NAD83(CSRS) / SCoPQ zone 2", 2945: "NAD83(CSRS) / MTM zone 3", 2946: "NAD83(CSRS) / MTM zone 4", 2947: "NAD83(CSRS) / MTM zone 5", 2948: "NAD83(CSRS) / MTM zone 6", 2949: "NAD83(CSRS) / MTM zone 7", 2950: "NAD83(CSRS) / MTM zone 8", 2951: "NAD83(CSRS) / MTM zone 9", 2952: "NAD83(CSRS) / MTM zone 10", 2953: "NAD83(CSRS) / New Brunswick Stereographic", 2954: "NAD83(CSRS) / Prince Edward Isl. 
Stereographic (NAD83)", 2955: "NAD83(CSRS) / UTM zone 11N", 2956: "NAD83(CSRS) / UTM zone 12N", 2957: "NAD83(CSRS) / UTM zone 13N", 2958: "NAD83(CSRS) / UTM zone 17N", 2959: "NAD83(CSRS) / UTM zone 18N", 2960: "NAD83(CSRS) / UTM zone 19N", 2961: "NAD83(CSRS) / UTM zone 20N", 2962: "NAD83(CSRS) / UTM zone 21N", 2963: "Lisbon 1890 (Lisbon) / Portugal Bonne", 2964: "NAD27 / Alaska Albers", 2965: "NAD83 / Indiana East (ftUS)", 2966: "NAD83 / Indiana West (ftUS)", 2967: "NAD83(HARN) / Indiana East (ftUS)", 2968: "NAD83(HARN) / Indiana West (ftUS)", 2969: "Fort Marigot / UTM zone 20N", 2970: "Guadeloupe 1948 / UTM zone 20N", 2971: "CSG67 / UTM zone 22N", 2972: "RGFG95 / UTM zone 22N", 2973: "Martinique 1938 / UTM zone 20N", 2975: "RGR92 / UTM zone 40S", 2976: "Tahiti 52 / UTM zone 6S", 2977: "Tahaa 54 / UTM zone 5S", 2978: "IGN72 Nuku Hiva / UTM zone 7S", 2979: "K0 1949 / UTM zone 42S", 2980: "Combani 1950 / UTM zone 38S", 2981: "IGN56 Lifou / UTM zone 58S", 2982: "IGN72 Grand Terre / UTM zone 58S", 2983: "ST87 Ouvea / UTM zone 58S", 2984: "RGNC 1991 / Lambert New Caledonia", 2985: "Petrels 1972 / Terre Adelie Polar Stereographic", 2986: "Perroud 1950 / Terre Adelie Polar Stereographic", 2987: "Saint Pierre et Miquelon 1950 / UTM zone 21N", 2988: "MOP78 / UTM zone 1S", 2989: "RRAF 1991 / UTM zone 20N", 2990: "Reunion 1947 / TM Reunion", 2991: "NAD83 / Oregon LCC (m)", 2992: "NAD83 / Oregon GIC Lambert (ft)", 2993: "NAD83(HARN) / Oregon LCC (m)", 2994: "NAD83(HARN) / Oregon GIC Lambert (ft)", 2995: "IGN53 Mare / UTM zone 58S", 2996: "ST84 Ile des Pins / UTM zone 58S", 2997: "ST71 Belep / UTM zone 58S", 2998: "NEA74 Noumea / UTM zone 58S", 2999: "Grand Comoros / UTM zone 38S", 3000: "Segara / NEIEZ", 3001: "Batavia / NEIEZ", 3002: "Makassar / NEIEZ", 3003: "Monte Mario / Italy zone 1", 3004: "Monte Mario / Italy zone 2", 3005: "NAD83 / BC Albers", 3006: "SWEREF99 TM", 3007: "SWEREF99 12 00", 3008: "SWEREF99 13 30", 3009: "SWEREF99 15 00", 3010: "SWEREF99 16 30", 3011: "SWEREF99 18 00", 3012: "SWEREF99 14 15", 3013: "SWEREF99 15 45", 3014: "SWEREF99 17 15", 3015: "SWEREF99 18 45", 3016: "SWEREF99 20 15", 3017: "SWEREF99 21 45", 3018: "SWEREF99 23 15", 3019: "RT90 7.5 gon V", 3020: "RT90 5 gon V", 3021: "RT90 2.5 gon V", 3022: "RT90 0 gon", 3023: "RT90 2.5 gon O", 3024: "RT90 5 gon O", 3025: "RT38 7.5 gon V", 3026: "RT38 5 gon V", 3027: "RT38 2.5 gon V", 3028: "RT38 0 gon", 3029: "RT38 2.5 gon O", 3030: "RT38 5 gon O", 3031: "WGS 84 / Antarctic Polar Stereographic", 3032: "WGS 84 / Australian Antarctic Polar Stereographic", 3033: "WGS 84 / Australian Antarctic Lambert", 3034: "ETRS89 / LCC Europe", 3035: "ETRS89 / LAEA Europe", 3036: "Moznet / UTM zone 36S", 3037: "Moznet / UTM zone 37S", 3038: "ETRS89 / TM26", 3039: "ETRS89 / TM27", 3040: "ETRS89 / UTM zone 28N (N-E)", 3041: "ETRS89 / UTM zone 29N (N-E)", 3042: "ETRS89 / UTM zone 30N (N-E)", 3043: "ETRS89 / UTM zone 31N (N-E)", 3044: "ETRS89 / UTM zone 32N (N-E)", 3045: "ETRS89 / UTM zone 33N (N-E)", 3046: "ETRS89 / UTM zone 34N (N-E)", 3047: "ETRS89 / UTM zone 35N (N-E)", 3048: "ETRS89 / UTM zone 36N (N-E)", 3049: "ETRS89 / UTM zone 37N (N-E)", 3050: "ETRS89 / TM38", 3051: "ETRS89 / TM39", 3052: "Reykjavik 1900 / Lambert 1900", 3053: "Hjorsey 1955 / Lambert 1955", 3054: "Hjorsey 1955 / UTM zone 26N", 3055: "Hjorsey 1955 / UTM zone 27N", 3056: "Hjorsey 1955 / UTM zone 28N", 3057: "ISN93 / Lambert 1993", 3058: "Helle 1954 / Jan Mayen Grid", 3059: "LKS92 / Latvia TM", 3060: "IGN72 Grande Terre / UTM zone 58S", 3061: "Porto Santo 1995 / UTM zone 28N", 
3062: "Azores Oriental 1995 / UTM zone 26N", 3063: "Azores Central 1995 / UTM zone 26N", 3064: "IGM95 / UTM zone 32N", 3065: "IGM95 / UTM zone 33N", 3066: "ED50 / Jordan TM", 3067: "ETRS89 / TM35FIN(E,N)", 3068: "DHDN / Soldner Berlin", 3069: "NAD27 / Wisconsin Transverse Mercator", 3070: "NAD83 / Wisconsin Transverse Mercator", 3071: "NAD83(HARN) / Wisconsin Transverse Mercator", 3072: "NAD83 / Maine CS2000 East", 3073: "NAD83 / Maine CS2000 Central", 3074: "NAD83 / Maine CS2000 West", 3075: "NAD83(HARN) / Maine CS2000 East", 3076: "NAD83(HARN) / Maine CS2000 Central", 3077: "NAD83(HARN) / Maine CS2000 West", 3078: "NAD83 / Michigan Oblique Mercator", 3079: "NAD83(HARN) / Michigan Oblique Mercator", 3080: "NAD27 / Shackleford", 3081: "NAD83 / Texas State Mapping System", 3082: "NAD83 / Texas Centric Lambert Conformal", 3083: "NAD83 / Texas Centric Albers Equal Area", 3084: "NAD83(HARN) / Texas Centric Lambert Conformal", 3085: "NAD83(HARN) / Texas Centric Albers Equal Area", 3086: "NAD83 / Florida GDL Albers", 3087: "NAD83(HARN) / Florida GDL Albers", 3088: "NAD83 / Kentucky Single Zone", 3089: "NAD83 / Kentucky Single Zone (ftUS)", 3090: "NAD83(HARN) / Kentucky Single Zone", 3091: "NAD83(HARN) / Kentucky Single Zone (ftUS)", 3092: "Tokyo / UTM zone 51N", 3093: "Tokyo / UTM zone 52N", 3094: "Tokyo / UTM zone 53N", 3095: "Tokyo / UTM zone 54N", 3096: "Tokyo / UTM zone 55N", 3097: "JGD2000 / UTM zone 51N", 3098: "JGD2000 / UTM zone 52N", 3099: "JGD2000 / UTM zone 53N", 3100: "JGD2000 / UTM zone 54N", 3101: "JGD2000 / UTM zone 55N", 3102: "American Samoa 1962 / American Samoa Lambert", 3103: "Mauritania 1999 / UTM zone 28N", 3104: "Mauritania 1999 / UTM zone 29N", 3105: "Mauritania 1999 / UTM zone 30N", 3106: "Gulshan 303 / Bangladesh Transverse Mercator", 3107: "GDA94 / SA Lambert", 3108: "ETRS89 / Guernsey Grid", 3109: "ETRS89 / Jersey Transverse Mercator", 3110: "AGD66 / Vicgrid66", 3111: "GDA94 / Vicgrid94", 3112: "GDA94 / Geoscience Australia Lambert", 3113: "GDA94 / BCSG02", 3114: "MAGNA-SIRGAS / Colombia Far West zone", 3115: "MAGNA-SIRGAS / Colombia West zone", 3116: "MAGNA-SIRGAS / Colombia Bogota zone", 3117: "MAGNA-SIRGAS / Colombia East Central zone", 3118: "MAGNA-SIRGAS / Colombia East zone", 3119: "Douala 1948 / AEF west", 3120: "Pulkovo 1942(58) / Poland zone I", 3121: "PRS92 / Philippines zone 1", 3122: "PRS92 / Philippines zone 2", 3123: "PRS92 / Philippines zone 3", 3124: "PRS92 / Philippines zone 4", 3125: "PRS92 / Philippines zone 5", 3126: "ETRS89 / ETRS-GK19FIN", 3127: "ETRS89 / ETRS-GK20FIN", 3128: "ETRS89 / ETRS-GK21FIN", 3129: "ETRS89 / ETRS-GK22FIN", 3130: "ETRS89 / ETRS-GK23FIN", 3131: "ETRS89 / ETRS-GK24FIN", 3132: "ETRS89 / ETRS-GK25FIN", 3133: "ETRS89 / ETRS-GK26FIN", 3134: "ETRS89 / ETRS-GK27FIN", 3135: "ETRS89 / ETRS-GK28FIN", 3136: "ETRS89 / ETRS-GK29FIN", 3137: "ETRS89 / ETRS-GK30FIN", 3138: "ETRS89 / ETRS-GK31FIN", 3139: "Vanua Levu 1915 / Vanua Levu Grid", 3140: "Viti Levu 1912 / Viti Levu Grid", 3141: "Fiji 1956 / UTM zone 60S", 3142: "Fiji 1956 / UTM zone 1S", 3143: "Fiji 1986 / Fiji Map Grid", 3144: "FD54 / Faroe Lambert", 3145: "ETRS89 / Faroe Lambert", 3146: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 6", 3147: "Pulkovo 1942 / 3-degree Gauss-Kruger CM 18E", 3148: "Indian 1960 / UTM zone 48N", 3149: "Indian 1960 / UTM zone 49N", 3150: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 6", 3151: "Pulkovo 1995 / 3-degree Gauss-Kruger CM 18E", 3152: "ST74", 3153: "NAD83(CSRS) / BC Albers", 3154: "NAD83(CSRS) / UTM zone 7N", 3155: "NAD83(CSRS) / UTM zone 8N", 3156: 
"NAD83(CSRS) / UTM zone 9N", 3157: "NAD83(CSRS) / UTM zone 10N", 3158: "NAD83(CSRS) / UTM zone 14N", 3159: "NAD83(CSRS) / UTM zone 15N", 3160: "NAD83(CSRS) / UTM zone 16N", 3161: "NAD83 / Ontario MNR Lambert", 3162: "NAD83(CSRS) / Ontario MNR Lambert", 3163: "RGNC91-93 / Lambert New Caledonia", 3164: "ST87 Ouvea / UTM zone 58S", 3165: "NEA74 Noumea / Noumea Lambert", 3166: "NEA74 Noumea / Noumea Lambert 2", 3167: "Kertau (RSO) / RSO Malaya (ch)", 3168: "Kertau (RSO) / RSO Malaya (m)", 3169: "RGNC91-93 / UTM zone 57S", 3170: "RGNC91-93 / UTM zone 58S", 3171: "RGNC91-93 / UTM zone 59S", 3172: "IGN53 Mare / UTM zone 59S", 3173: "fk89 / Faroe Lambert FK89", 3174: "NAD83 / Great Lakes Albers", 3175: "NAD83 / Great Lakes and St Lawrence Albers", 3176: "Indian 1960 / TM 106 NE", 3177: "LGD2006 / Libya TM", 3178: "GR96 / UTM zone 18N", 3179: "GR96 / UTM zone 19N", 3180: "GR96 / UTM zone 20N", 3181: "GR96 / UTM zone 21N", 3182: "GR96 / UTM zone 22N", 3183: "GR96 / UTM zone 23N", 3184: "GR96 / UTM zone 24N", 3185: "GR96 / UTM zone 25N", 3186: "GR96 / UTM zone 26N", 3187: "GR96 / UTM zone 27N", 3188: "GR96 / UTM zone 28N", 3189: "GR96 / UTM zone 29N", 3190: "LGD2006 / Libya TM zone 5", 3191: "LGD2006 / Libya TM zone 6", 3192: "LGD2006 / Libya TM zone 7", 3193: "LGD2006 / Libya TM zone 8", 3194: "LGD2006 / Libya TM zone 9", 3195: "LGD2006 / Libya TM zone 10", 3196: "LGD2006 / Libya TM zone 11", 3197: "LGD2006 / Libya TM zone 12", 3198: "LGD2006 / Libya TM zone 13", 3199: "LGD2006 / UTM zone 32N", 3200: "FD58 / Iraq zone", 3201: "LGD2006 / UTM zone 33N", 3202: "LGD2006 / UTM zone 34N", 3203: "LGD2006 / UTM zone 35N", 3204: "WGS 84 / SCAR IMW SP19-20", 3205: "WGS 84 / SCAR IMW SP21-22", 3206: "WGS 84 / SCAR IMW SP23-24", 3207: "WGS 84 / SCAR IMW SQ01-02", 3208: "WGS 84 / SCAR IMW SQ19-20", 3209: "WGS 84 / SCAR IMW SQ21-22", 3210: "WGS 84 / SCAR IMW SQ37-38", 3211: "WGS 84 / SCAR IMW SQ39-40", 3212: "WGS 84 / SCAR IMW SQ41-42", 3213: "WGS 84 / SCAR IMW SQ43-44", 3214: "WGS 84 / SCAR IMW SQ45-46", 3215: "WGS 84 / SCAR IMW SQ47-48", 3216: "WGS 84 / SCAR IMW SQ49-50", 3217: "WGS 84 / SCAR IMW SQ51-52", 3218: "WGS 84 / SCAR IMW SQ53-54", 3219: "WGS 84 / SCAR IMW SQ55-56", 3220: "WGS 84 / SCAR IMW SQ57-58", 3221: "WGS 84 / SCAR IMW SR13-14", 3222: "WGS 84 / SCAR IMW SR15-16", 3223: "WGS 84 / SCAR IMW SR17-18", 3224: "WGS 84 / SCAR IMW SR19-20", 3225: "WGS 84 / SCAR IMW SR27-28", 3226: "WGS 84 / SCAR IMW SR29-30", 3227: "WGS 84 / SCAR IMW SR31-32", 3228: "WGS 84 / SCAR IMW SR33-34", 3229: "WGS 84 / SCAR IMW SR35-36", 3230: "WGS 84 / SCAR IMW SR37-38", 3231: "WGS 84 / SCAR IMW SR39-40", 3232: "WGS 84 / SCAR IMW SR41-42", 3233: "WGS 84 / SCAR IMW SR43-44", 3234: "WGS 84 / SCAR IMW SR45-46", 3235: "WGS 84 / SCAR IMW SR47-48", 3236: "WGS 84 / SCAR IMW SR49-50", 3237: "WGS 84 / SCAR IMW SR51-52", 3238: "WGS 84 / SCAR IMW SR53-54", 3239: "WGS 84 / SCAR IMW SR55-56", 3240: "WGS 84 / SCAR IMW SR57-58", 3241: "WGS 84 / SCAR IMW SR59-60", 3242: "WGS 84 / SCAR IMW SS04-06", 3243: "WGS 84 / SCAR IMW SS07-09", 3244: "WGS 84 / SCAR IMW SS10-12", 3245: "WGS 84 / SCAR IMW SS13-15", 3246: "WGS 84 / SCAR IMW SS16-18", 3247: "WGS 84 / SCAR IMW SS19-21", 3248: "WGS 84 / SCAR IMW SS25-27", 3249: "WGS 84 / SCAR IMW SS28-30", 3250: "WGS 84 / SCAR IMW SS31-33", 3251: "WGS 84 / SCAR IMW SS34-36", 3252: "WGS 84 / SCAR IMW SS37-39", 3253: "WGS 84 / SCAR IMW SS40-42", 3254: "WGS 84 / SCAR IMW SS43-45", 3255: "WGS 84 / SCAR IMW SS46-48", 3256: "WGS 84 / SCAR IMW SS49-51", 3257: "WGS 84 / SCAR IMW SS52-54", 3258: "WGS 84 / SCAR IMW 
SS55-57", 3259: "WGS 84 / SCAR IMW SS58-60", 3260: "WGS 84 / SCAR IMW ST01-04", 3261: "WGS 84 / SCAR IMW ST05-08", 3262: "WGS 84 / SCAR IMW ST09-12", 3263: "WGS 84 / SCAR IMW ST13-16", 3264: "WGS 84 / SCAR IMW ST17-20", 3265: "WGS 84 / SCAR IMW ST21-24", 3266: "WGS 84 / SCAR IMW ST25-28", 3267: "WGS 84 / SCAR IMW ST29-32", 3268: "WGS 84 / SCAR IMW ST33-36", 3269: "WGS 84 / SCAR IMW ST37-40", 3270: "WGS 84 / SCAR IMW ST41-44", 3271: "WGS 84 / SCAR IMW ST45-48", 3272: "WGS 84 / SCAR IMW ST49-52", 3273: "WGS 84 / SCAR IMW ST53-56", 3274: "WGS 84 / SCAR IMW ST57-60", 3275: "WGS 84 / SCAR IMW SU01-05", 3276: "WGS 84 / SCAR IMW SU06-10", 3277: "WGS 84 / SCAR IMW SU11-15", 3278: "WGS 84 / SCAR IMW SU16-20", 3279: "WGS 84 / SCAR IMW SU21-25", 3280: "WGS 84 / SCAR IMW SU26-30", 3281: "WGS 84 / SCAR IMW SU31-35", 3282: "WGS 84 / SCAR IMW SU36-40", 3283: "WGS 84 / SCAR IMW SU41-45", 3284: "WGS 84 / SCAR IMW SU46-50", 3285: "WGS 84 / SCAR IMW SU51-55", 3286: "WGS 84 / SCAR IMW SU56-60", 3287: "WGS 84 / SCAR IMW SV01-10", 3288: "WGS 84 / SCAR IMW SV11-20", 3289: "WGS 84 / SCAR IMW SV21-30", 3290: "WGS 84 / SCAR IMW SV31-40", 3291: "WGS 84 / SCAR IMW SV41-50", 3292: "WGS 84 / SCAR IMW SV51-60", 3293: "WGS 84 / SCAR IMW SW01-60", 3294: "WGS 84 / USGS Transantarctic Mountains", 3295: "Guam 1963 / Yap Islands", 3296: "RGPF / UTM zone 5S", 3297: "RGPF / UTM zone 6S", 3298: "RGPF / UTM zone 7S", 3299: "RGPF / UTM zone 8S", 3300: "Estonian Coordinate System of 1992", 3301: "Estonian Coordinate System of 1997", 3302: "IGN63 Hiva Oa / UTM zone 7S", 3303: "Fatu Iva 72 / UTM zone 7S", 3304: "Tahiti 79 / UTM zone 6S", 3305: "Moorea 87 / UTM zone 6S", 3306: "Maupiti 83 / UTM zone 5S", 3307: "Nakhl-e Ghanem / UTM zone 39N", 3308: "GDA94 / NSW Lambert", 3309: "NAD27 / California Albers", 3310: "NAD83 / California Albers", 3311: "NAD83(HARN) / California Albers", 3312: "CSG67 / UTM zone 21N", 3313: "RGFG95 / UTM zone 21N", 3314: "Katanga 1955 / Katanga Lambert", 3315: "Katanga 1955 / Katanga TM", 3316: "Kasai 1953 / Congo TM zone 22", 3317: "Kasai 1953 / Congo TM zone 24", 3318: "IGC 1962 / Congo TM zone 12", 3319: "IGC 1962 / Congo TM zone 14", 3320: "IGC 1962 / Congo TM zone 16", 3321: "IGC 1962 / Congo TM zone 18", 3322: "IGC 1962 / Congo TM zone 20", 3323: "IGC 1962 / Congo TM zone 22", 3324: "IGC 1962 / Congo TM zone 24", 3325: "IGC 1962 / Congo TM zone 26", 3326: "IGC 1962 / Congo TM zone 28", 3327: "IGC 1962 / Congo TM zone 30", 3328: "Pulkovo 1942(58) / GUGiK-80", 3329: "Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 5", 3330: "Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 6", 3331: "Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 7", 3332: "Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 8", 3333: "Pulkovo 1942(58) / Gauss-Kruger zone 3", 3334: "Pulkovo 1942(58) / Gauss-Kruger zone 4", 3335: "Pulkovo 1942(58) / Gauss-Kruger zone 5", 3336: "IGN 1962 Kerguelen / UTM zone 42S", 3337: "Le Pouce 1934 / Mauritius Grid", 3338: "NAD83 / Alaska Albers", 3339: "IGCB 1955 / Congo TM zone 12", 3340: "IGCB 1955 / Congo TM zone 14", 3341: "IGCB 1955 / Congo TM zone 16", 3342: "IGCB 1955 / UTM zone 33S", 3343: "Mauritania 1999 / UTM zone 28N", 3344: "Mauritania 1999 / UTM zone 29N", 3345: "Mauritania 1999 / UTM zone 30N", 3346: "LKS94 / Lithuania TM", 3347: "NAD83 / Statistics Canada Lambert", 3348: "NAD83(CSRS) / Statistics Canada Lambert", 3349: "WGS 84 / PDC Mercator", 3350: "Pulkovo 1942 / CS63 zone C0", 3351: "Pulkovo 1942 / CS63 zone C1", 3352: "Pulkovo 1942 / CS63 zone C2", 3353: "Mhast (onshore) / UTM zone 32S", 3354: 
"Mhast (offshore) / UTM zone 32S", 3355: "Egypt Gulf of Suez S-650 TL / Red Belt", 3356: "Grand Cayman 1959 / UTM zone 17N", 3357: "Little Cayman 1961 / UTM zone 17N", 3358: "NAD83(HARN) / North Carolina", 3359: "NAD83(HARN) / North Carolina (ftUS)", 3360: "NAD83(HARN) / South Carolina", 3361: "NAD83(HARN) / South Carolina (ft)", 3362: "NAD83(HARN) / Pennsylvania North", 3363: "NAD83(HARN) / Pennsylvania North (ftUS)", 3364: "NAD83(HARN) / Pennsylvania South", 3365: "NAD83(HARN) / Pennsylvania South (ftUS)", 3366: "Hong Kong 1963 Grid System", 3367: "IGN Astro 1960 / UTM zone 28N", 3368: "IGN Astro 1960 / UTM zone 29N", 3369: "IGN Astro 1960 / UTM zone 30N", 3370: "NAD27 / UTM zone 59N", 3371: "NAD27 / UTM zone 60N", 3372: "NAD83 / UTM zone 59N", 3373: "NAD83 / UTM zone 60N", 3374: "FD54 / UTM zone 29N", 3375: "GDM2000 / Peninsula RSO", 3376: "GDM2000 / East Malaysia BRSO", 3377: "GDM2000 / Johor Grid", 3378: "GDM2000 / Sembilan and Melaka Grid", 3379: "GDM2000 / Pahang Grid", 3380: "GDM2000 / Selangor Grid", 3381: "GDM2000 / Terengganu Grid", 3382: "GDM2000 / Pinang Grid", 3383: "GDM2000 / Kedah and Perlis Grid", 3384: "GDM2000 / Perak Grid", 3385: "GDM2000 / Kelantan Grid", 3386: "KKJ / Finland zone 0", 3387: "KKJ / Finland zone 5", 3388: "Pulkovo 1942 / Caspian Sea Mercator", 3389: "Pulkovo 1942 / 3-degree Gauss-Kruger zone 60", 3390: "Pulkovo 1995 / 3-degree Gauss-Kruger zone 60", 3391: "Karbala 1979 / UTM zone 37N", 3392: "Karbala 1979 / UTM zone 38N", 3393: "Karbala 1979 / UTM zone 39N", 3394: "Nahrwan 1934 / Iraq zone", 3395: "WGS 84 / World Mercator", 3396: "PD/83 / 3-degree Gauss-Kruger zone 3", 3397: "PD/83 / 3-degree Gauss-Kruger zone 4", 3398: "RD/83 / 3-degree Gauss-Kruger zone 4", 3399: "RD/83 / 3-degree Gauss-Kruger zone 5", 3400: "NAD83 / Alberta 10-TM (Forest)", 3401: "NAD83 / Alberta 10-TM (Resource)", 3402: "NAD83(CSRS) / Alberta 10-TM (Forest)", 3403: "NAD83(CSRS) / Alberta 10-TM (Resource)", 3404: "NAD83(HARN) / North Carolina (ftUS)", 3405: "VN-2000 / UTM zone 48N", 3406: "VN-2000 / UTM zone 49N", 3407: "Hong Kong 1963 Grid System", 3408: "NSIDC EASE-Grid North", 3409: "NSIDC EASE-Grid South", 3410: "NSIDC EASE-Grid Global", 3411: "NSIDC Sea Ice Polar Stereographic North", 3412: "NSIDC Sea Ice Polar Stereographic South", 3413: "WGS 84 / NSIDC Sea Ice Polar Stereographic North", 3414: "SVY21 / Singapore TM", 3415: "WGS 72BE / South China Sea Lambert", 3416: "ETRS89 / Austria Lambert", 3417: "NAD83 / Iowa North (ftUS)", 3418: "NAD83 / Iowa South (ftUS)", 3419: "NAD83 / Kansas North (ftUS)", 3420: "NAD83 / Kansas South (ftUS)", 3421: "NAD83 / Nevada East (ftUS)", 3422: "NAD83 / Nevada Central (ftUS)", 3423: "NAD83 / Nevada West (ftUS)", 3424: "NAD83 / New Jersey (ftUS)", 3425: "NAD83(HARN) / Iowa North (ftUS)", 3426: "NAD83(HARN) / Iowa South (ftUS)", 3427: "NAD83(HARN) / Kansas North (ftUS)", 3428: "NAD83(HARN) / Kansas South (ftUS)", 3429: "NAD83(HARN) / Nevada East (ftUS)", 3430: "NAD83(HARN) / Nevada Central (ftUS)", 3431: "NAD83(HARN) / Nevada West (ftUS)", 3432: "NAD83(HARN) / New Jersey (ftUS)", 3433: "NAD83 / Arkansas North (ftUS)", 3434: "NAD83 / Arkansas South (ftUS)", 3435: "NAD83 / Illinois East (ftUS)", 3436: "NAD83 / Illinois West (ftUS)", 3437: "NAD83 / New Hampshire (ftUS)", 3438: "NAD83 / Rhode Island (ftUS)", 3439: "PSD93 / UTM zone 39N", 3440: "PSD93 / UTM zone 40N", 3441: "NAD83(HARN) / Arkansas North (ftUS)", 3442: "NAD83(HARN) / Arkansas South (ftUS)", 3443: "NAD83(HARN) / Illinois East (ftUS)", 3444: "NAD83(HARN) / Illinois West (ftUS)", 3445: 
"NAD83(HARN) / New Hampshire (ftUS)", 3446: "NAD83(HARN) / Rhode Island (ftUS)", 3447: "ETRS89 / Belgian Lambert 2005", 3448: "JAD2001 / Jamaica Metric Grid", 3449: "JAD2001 / UTM zone 17N", 3450: "JAD2001 / UTM zone 18N", 3451: "NAD83 / Louisiana North (ftUS)", 3452: "NAD83 / Louisiana South (ftUS)", 3453: "NAD83 / Louisiana Offshore (ftUS)", 3454: "NAD83 / South Dakota North (ftUS)", 3455: "NAD83 / South Dakota South (ftUS)", 3456: "NAD83(HARN) / Louisiana North (ftUS)", 3457: "NAD83(HARN) / Louisiana South (ftUS)", 3458: "NAD83(HARN) / South Dakota North (ftUS)", 3459: "NAD83(HARN) / South Dakota South (ftUS)", 3460: "Fiji 1986 / Fiji Map Grid", 3461: "Dabola 1981 / UTM zone 28N", 3462: "Dabola 1981 / UTM zone 29N", 3463: "NAD83 / Maine CS2000 Central", 3464: "NAD83(HARN) / Maine CS2000 Central", 3465: "NAD83(NSRS2007) / Alabama East", 3466: "NAD83(NSRS2007) / Alabama West", 3467: "NAD83(NSRS2007) / Alaska Albers", 3468: "NAD83(NSRS2007) / Alaska zone 1", 3469: "NAD83(NSRS2007) / Alaska zone 2", 3470: "NAD83(NSRS2007) / Alaska zone 3", 3471: "NAD83(NSRS2007) / Alaska zone 4", 3472: "NAD83(NSRS2007) / Alaska zone 5", 3473: "NAD83(NSRS2007) / Alaska zone 6", 3474: "NAD83(NSRS2007) / Alaska zone 7", 3475: "NAD83(NSRS2007) / Alaska zone 8", 3476: "NAD83(NSRS2007) / Alaska zone 9", 3477: "NAD83(NSRS2007) / Alaska zone 10", 3478: "NAD83(NSRS2007) / Arizona Central", 3479: "NAD83(NSRS2007) / Arizona Central (ft)", 3480: "NAD83(NSRS2007) / Arizona East", 3481: "NAD83(NSRS2007) / Arizona East (ft)", 3482: "NAD83(NSRS2007) / Arizona West", 3483: "NAD83(NSRS2007) / Arizona West (ft)", 3484: "NAD83(NSRS2007) / Arkansas North", 3485: "NAD83(NSRS2007) / Arkansas North (ftUS)", 3486: "NAD83(NSRS2007) / Arkansas South", 3487: "NAD83(NSRS2007) / Arkansas South (ftUS)", 3488: "NAD83(NSRS2007) / California Albers", 3489: "NAD83(NSRS2007) / California zone 1", 3490: "NAD83(NSRS2007) / California zone 1 (ftUS)", 3491: "NAD83(NSRS2007) / California zone 2", 3492: "NAD83(NSRS2007) / California zone 2 (ftUS)", 3493: "NAD83(NSRS2007) / California zone 3", 3494: "NAD83(NSRS2007) / California zone 3 (ftUS)", 3495: "NAD83(NSRS2007) / California zone 4", 3496: "NAD83(NSRS2007) / California zone 4 (ftUS)", 3497: "NAD83(NSRS2007) / California zone 5", 3498: "NAD83(NSRS2007) / California zone 5 (ftUS)", 3499: "NAD83(NSRS2007) / California zone 6", 3500: "NAD83(NSRS2007) / California zone 6 (ftUS)", 3501: "NAD83(NSRS2007) / Colorado Central", 3502: "NAD83(NSRS2007) / Colorado Central (ftUS)", 3503: "NAD83(NSRS2007) / Colorado North", 3504: "NAD83(NSRS2007) / Colorado North (ftUS)", 3505: "NAD83(NSRS2007) / Colorado South", 3506: "NAD83(NSRS2007) / Colorado South (ftUS)", 3507: "NAD83(NSRS2007) / Connecticut", 3508: "NAD83(NSRS2007) / Connecticut (ftUS)", 3509: "NAD83(NSRS2007) / Delaware", 3510: "NAD83(NSRS2007) / Delaware (ftUS)", 3511: "NAD83(NSRS2007) / Florida East", 3512: "NAD83(NSRS2007) / Florida East (ftUS)", 3513: "NAD83(NSRS2007) / Florida GDL Albers", 3514: "NAD83(NSRS2007) / Florida North", 3515: "NAD83(NSRS2007) / Florida North (ftUS)", 3516: "NAD83(NSRS2007) / Florida West", 3517: "NAD83(NSRS2007) / Florida West (ftUS)", 3518: "NAD83(NSRS2007) / Georgia East", 3519: "NAD83(NSRS2007) / Georgia East (ftUS)", 3520: "NAD83(NSRS2007) / Georgia West", 3521: "NAD83(NSRS2007) / Georgia West (ftUS)", 3522: "NAD83(NSRS2007) / Idaho Central", 3523: "NAD83(NSRS2007) / Idaho Central (ftUS)", 3524: "NAD83(NSRS2007) / Idaho East", 3525: "NAD83(NSRS2007) / Idaho East (ftUS)", 3526: "NAD83(NSRS2007) / Idaho West", 3527: 
"NAD83(NSRS2007) / Idaho West (ftUS)", 3528: "NAD83(NSRS2007) / Illinois East", 3529: "NAD83(NSRS2007) / Illinois East (ftUS)", 3530: "NAD83(NSRS2007) / Illinois West", 3531: "NAD83(NSRS2007) / Illinois West (ftUS)", 3532: "NAD83(NSRS2007) / Indiana East", 3533: "NAD83(NSRS2007) / Indiana East (ftUS)", 3534: "NAD83(NSRS2007) / Indiana West", 3535: "NAD83(NSRS2007) / Indiana West (ftUS)", 3536: "NAD83(NSRS2007) / Iowa North", 3537: "NAD83(NSRS2007) / Iowa North (ftUS)", 3538: "NAD83(NSRS2007) / Iowa South", 3539: "NAD83(NSRS2007) / Iowa South (ftUS)", 3540: "NAD83(NSRS2007) / Kansas North", 3541: "NAD83(NSRS2007) / Kansas North (ftUS)", 3542: "NAD83(NSRS2007) / Kansas South", 3543: "NAD83(NSRS2007) / Kansas South (ftUS)", 3544: "NAD83(NSRS2007) / Kentucky North", 3545: "NAD83(NSRS2007) / Kentucky North (ftUS)", 3546: "NAD83(NSRS2007) / Kentucky Single Zone", 3547: "NAD83(NSRS2007) / Kentucky Single Zone (ftUS)", 3548: "NAD83(NSRS2007) / Kentucky South", 3549: "NAD83(NSRS2007) / Kentucky South (ftUS)", 3550: "NAD83(NSRS2007) / Louisiana North", 3551: "NAD83(NSRS2007) / Louisiana North (ftUS)", 3552: "NAD83(NSRS2007) / Louisiana South", 3553: "NAD83(NSRS2007) / Louisiana South (ftUS)", 3554: "NAD83(NSRS2007) / Maine CS2000 Central", 3555: "NAD83(NSRS2007) / Maine CS2000 East", 3556: "NAD83(NSRS2007) / Maine CS2000 West", 3557: "NAD83(NSRS2007) / Maine East", 3558: "NAD83(NSRS2007) / Maine West", 3559: "NAD83(NSRS2007) / Maryland", 3560: "NAD83 / Utah North (ftUS)", 3561: "Old Hawaiian / Hawaii zone 1", 3562: "Old Hawaiian / Hawaii zone 2", 3563: "Old Hawaiian / Hawaii zone 3", 3564: "Old Hawaiian / Hawaii zone 4", 3565: "Old Hawaiian / Hawaii zone 5", 3566: "NAD83 / Utah Central (ftUS)", 3567: "NAD83 / Utah South (ftUS)", 3568: "NAD83(HARN) / Utah North (ftUS)", 3569: "NAD83(HARN) / Utah Central (ftUS)", 3570: "NAD83(HARN) / Utah South (ftUS)", 3571: "WGS 84 / North Pole LAEA Bering Sea", 3572: "WGS 84 / North Pole LAEA Alaska", 3573: "WGS 84 / North Pole LAEA Canada", 3574: "WGS 84 / North Pole LAEA Atlantic", 3575: "WGS 84 / North Pole LAEA Europe", 3576: "WGS 84 / North Pole LAEA Russia", 3577: "GDA94 / Australian Albers", 3578: "NAD83 / Yukon Albers", 3579: "NAD83(CSRS) / Yukon Albers", 3580: "NAD83 / NWT Lambert", 3581: "NAD83(CSRS) / NWT Lambert", 3582: "NAD83(NSRS2007) / Maryland (ftUS)", 3583: "NAD83(NSRS2007) / Massachusetts Island", 3584: "NAD83(NSRS2007) / Massachusetts Island (ftUS)", 3585: "NAD83(NSRS2007) / Massachusetts Mainland", 3586: "NAD83(NSRS2007) / Massachusetts Mainland (ftUS)", 3587: "NAD83(NSRS2007) / Michigan Central", 3588: "NAD83(NSRS2007) / Michigan Central (ft)", 3589: "NAD83(NSRS2007) / Michigan North", 3590: "NAD83(NSRS2007) / Michigan North (ft)", 3591: "NAD83(NSRS2007) / Michigan Oblique Mercator", 3592: "NAD83(NSRS2007) / Michigan South", 3593: "NAD83(NSRS2007) / Michigan South (ft)", 3594: "NAD83(NSRS2007) / Minnesota Central", 3595: "NAD83(NSRS2007) / Minnesota North", 3596: "NAD83(NSRS2007) / Minnesota South", 3597: "NAD83(NSRS2007) / Mississippi East", 3598: "NAD83(NSRS2007) / Mississippi East (ftUS)", 3599: "NAD83(NSRS2007) / Mississippi West", 3600: "NAD83(NSRS2007) / Mississippi West (ftUS)", 3601: "NAD83(NSRS2007) / Missouri Central", 3602: "NAD83(NSRS2007) / Missouri East", 3603: "NAD83(NSRS2007) / Missouri West", 3604: "NAD83(NSRS2007) / Montana", 3605: "NAD83(NSRS2007) / Montana (ft)", 3606: "NAD83(NSRS2007) / Nebraska", 3607: "NAD83(NSRS2007) / Nevada Central", 3608: "NAD83(NSRS2007) / Nevada Central (ftUS)", 3609: "NAD83(NSRS2007) / Nevada 
East", 3610: "NAD83(NSRS2007) / Nevada East (ftUS)", 3611: "NAD83(NSRS2007) / Nevada West", 3612: "NAD83(NSRS2007) / Nevada West (ftUS)", 3613: "NAD83(NSRS2007) / New Hampshire", 3614: "NAD83(NSRS2007) / New Hampshire (ftUS)", 3615: "NAD83(NSRS2007) / New Jersey", 3616: "NAD83(NSRS2007) / New Jersey (ftUS)", 3617: "NAD83(NSRS2007) / New Mexico Central", 3618: "NAD83(NSRS2007) / New Mexico Central (ftUS)", 3619: "NAD83(NSRS2007) / New Mexico East", 3620: "NAD83(NSRS2007) / New Mexico East (ftUS)", 3621: "NAD83(NSRS2007) / New Mexico West", 3622: "NAD83(NSRS2007) / New Mexico West (ftUS)", 3623: "NAD83(NSRS2007) / New York Central", 3624: "NAD83(NSRS2007) / New York Central (ftUS)", 3625: "NAD83(NSRS2007) / New York East", 3626: "NAD83(NSRS2007) / New York East (ftUS)", 3627: "NAD83(NSRS2007) / New York Long Island", 3628: "NAD83(NSRS2007) / New York Long Island (ftUS)", 3629: "NAD83(NSRS2007) / New York West", 3630: "NAD83(NSRS2007) / New York West (ftUS)", 3631: "NAD83(NSRS2007) / North Carolina", 3632: "NAD83(NSRS2007) / North Carolina (ftUS)", 3633: "NAD83(NSRS2007) / North Dakota North", 3634: "NAD83(NSRS2007) / North Dakota North (ft)", 3635: "NAD83(NSRS2007) / North Dakota South", 3636: "NAD83(NSRS2007) / North Dakota South (ft)", 3637: "NAD83(NSRS2007) / Ohio North", 3638: "NAD83(NSRS2007) / Ohio South", 3639: "NAD83(NSRS2007) / Oklahoma North", 3640: "NAD83(NSRS2007) / Oklahoma North (ftUS)", 3641: "NAD83(NSRS2007) / Oklahoma South", 3642: "NAD83(NSRS2007) / Oklahoma South (ftUS)", 3643: "NAD83(NSRS2007) / Oregon LCC (m)", 3644: "NAD83(NSRS2007) / Oregon GIC Lambert (ft)", 3645: "NAD83(NSRS2007) / Oregon North", 3646: "NAD83(NSRS2007) / Oregon North (ft)", 3647: "NAD83(NSRS2007) / Oregon South", 3648: "NAD83(NSRS2007) / Oregon South (ft)", 3649: "NAD83(NSRS2007) / Pennsylvania North", 3650: "NAD83(NSRS2007) / Pennsylvania North (ftUS)", 3651: "NAD83(NSRS2007) / Pennsylvania South", 3652: "NAD83(NSRS2007) / Pennsylvania South (ftUS)", 3653: "NAD83(NSRS2007) / Rhode Island", 3654: "NAD83(NSRS2007) / Rhode Island (ftUS)", 3655: "NAD83(NSRS2007) / South Carolina", 3656: "NAD83(NSRS2007) / South Carolina (ft)", 3657: "NAD83(NSRS2007) / South Dakota North", 3658: "NAD83(NSRS2007) / South Dakota North (ftUS)", 3659: "NAD83(NSRS2007) / South Dakota South", 3660: "NAD83(NSRS2007) / South Dakota South (ftUS)", 3661: "NAD83(NSRS2007) / Tennessee", 3662: "NAD83(NSRS2007) / Tennessee (ftUS)", 3663: "NAD83(NSRS2007) / Texas Central", 3664: "NAD83(NSRS2007) / Texas Central (ftUS)", 3665: "NAD83(NSRS2007) / Texas Centric Albers Equal Area", 3666: "NAD83(NSRS2007) / Texas Centric Lambert Conformal", 3667: "NAD83(NSRS2007) / Texas North", 3668: "NAD83(NSRS2007) / Texas North (ftUS)", 3669: "NAD83(NSRS2007) / Texas North Central", 3670: "NAD83(NSRS2007) / Texas North Central (ftUS)", 3671: "NAD83(NSRS2007) / Texas South", 3672: "NAD83(NSRS2007) / Texas South (ftUS)", 3673: "NAD83(NSRS2007) / Texas South Central", 3674: "NAD83(NSRS2007) / Texas South Central (ftUS)", 3675: "NAD83(NSRS2007) / Utah Central", 3676: "NAD83(NSRS2007) / Utah Central (ft)", 3677: "NAD83(NSRS2007) / Utah Central (ftUS)", 3678: "NAD83(NSRS2007) / Utah North", 3679: "NAD83(NSRS2007) / Utah North (ft)", 3680: "NAD83(NSRS2007) / Utah North (ftUS)", 3681: "NAD83(NSRS2007) / Utah South", 3682: "NAD83(NSRS2007) / Utah South (ft)", 3683: "NAD83(NSRS2007) / Utah South (ftUS)", 3684: "NAD83(NSRS2007) / Vermont", 3685: "NAD83(NSRS2007) / Virginia North", 3686: "NAD83(NSRS2007) / Virginia North (ftUS)", 3687: "NAD83(NSRS2007) / Virginia 
South", 3688: "NAD83(NSRS2007) / Virginia South (ftUS)", 3689: "NAD83(NSRS2007) / Washington North", 3690: "NAD83(NSRS2007) / Washington North (ftUS)", 3691: "NAD83(NSRS2007) / Washington South", 3692: "NAD83(NSRS2007) / Washington South (ftUS)", 3693: "NAD83(NSRS2007) / West Virginia North", 3694: "NAD83(NSRS2007) / West Virginia South", 3695: "NAD83(NSRS2007) / Wisconsin Central", 3696: "NAD83(NSRS2007) / Wisconsin Central (ftUS)", 3697: "NAD83(NSRS2007) / Wisconsin North", 3698: "NAD83(NSRS2007) / Wisconsin North (ftUS)", 3699: "NAD83(NSRS2007) / Wisconsin South", 3700: "NAD83(NSRS2007) / Wisconsin South (ftUS)", 3701: "NAD83(NSRS2007) / Wisconsin Transverse Mercator", 3702: "NAD83(NSRS2007) / Wyoming East", 3703: "NAD83(NSRS2007) / Wyoming East Central", 3704: "NAD83(NSRS2007) / Wyoming West Central", 3705: "NAD83(NSRS2007) / Wyoming West", 3706: "NAD83(NSRS2007) / UTM zone 59N", 3707: "NAD83(NSRS2007) / UTM zone 60N", 3708: "NAD83(NSRS2007) / UTM zone 1N", 3709: "NAD83(NSRS2007) / UTM zone 2N", 3710: "NAD83(NSRS2007) / UTM zone 3N", 3711: "NAD83(NSRS2007) / UTM zone 4N", 3712: "NAD83(NSRS2007) / UTM zone 5N", 3713: "NAD83(NSRS2007) / UTM zone 6N", 3714: "NAD83(NSRS2007) / UTM zone 7N", 3715: "NAD83(NSRS2007) / UTM zone 8N", 3716: "NAD83(NSRS2007) / UTM zone 9N", 3717: "NAD83(NSRS2007) / UTM zone 10N", 3718: "NAD83(NSRS2007) / UTM zone 11N", 3719: "NAD83(NSRS2007) / UTM zone 12N", 3720: "NAD83(NSRS2007) / UTM zone 13N", 3721: "NAD83(NSRS2007) / UTM zone 14N", 3722: "NAD83(NSRS2007) / UTM zone 15N", 3723: "NAD83(NSRS2007) / UTM zone 16N", 3724: "NAD83(NSRS2007) / UTM zone 17N", 3725: "NAD83(NSRS2007) / UTM zone 18N", 3726: "NAD83(NSRS2007) / UTM zone 19N", 3727: "Reunion 1947 / TM Reunion", 3728: "NAD83(NSRS2007) / Ohio North (ftUS)", 3729: "NAD83(NSRS2007) / Ohio South (ftUS)", 3730: "NAD83(NSRS2007) / Wyoming East (ftUS)", 3731: "NAD83(NSRS2007) / Wyoming East Central (ftUS)", 3732: "NAD83(NSRS2007) / Wyoming West Central (ftUS)", 3733: "NAD83(NSRS2007) / Wyoming West (ftUS)", 3734: "NAD83 / Ohio North (ftUS)", 3735: "NAD83 / Ohio South (ftUS)", 3736: "NAD83 / Wyoming East (ftUS)", 3737: "NAD83 / Wyoming East Central (ftUS)", 3738: "NAD83 / Wyoming West Central (ftUS)", 3739: "NAD83 / Wyoming West (ftUS)", 3740: "NAD83(HARN) / UTM zone 10N", 3741: "NAD83(HARN) / UTM zone 11N", 3742: "NAD83(HARN) / UTM zone 12N", 3743: "NAD83(HARN) / UTM zone 13N", 3744: "NAD83(HARN) / UTM zone 14N", 3745: "NAD83(HARN) / UTM zone 15N", 3746: "NAD83(HARN) / UTM zone 16N", 3747: "NAD83(HARN) / UTM zone 17N", 3748: "NAD83(HARN) / UTM zone 18N", 3749: "NAD83(HARN) / UTM zone 19N", 3750: "NAD83(HARN) / UTM zone 4N", 3751: "NAD83(HARN) / UTM zone 5N", 3752: "WGS 84 / Mercator 41", 3753: "NAD83(HARN) / Ohio North (ftUS)", 3754: "NAD83(HARN) / Ohio South (ftUS)", 3755: "NAD83(HARN) / Wyoming East (ftUS)", 3756: "NAD83(HARN) / Wyoming East Central (ftUS)", 3757: "NAD83(HARN) / Wyoming West Central (ftUS)", 3758: "NAD83(HARN) / Wyoming West (ftUS)", 3759: "NAD83 / Hawaii zone 3 (ftUS)", 3760: "NAD83(HARN) / Hawaii zone 3 (ftUS)", 3761: "NAD83(CSRS) / UTM zone 22N", 3762: "WGS 84 / South Georgia Lambert", 3763: "ETRS89 / Portugal TM06", 3764: "NZGD2000 / Chatham Island Circuit 2000", 3765: "HTRS96 / Croatia TM", 3766: "HTRS96 / Croatia LCC", 3767: "HTRS96 / UTM zone 33N", 3768: "HTRS96 / UTM zone 34N", 3769: "Bermuda 1957 / UTM zone 20N", 3770: "BDA2000 / Bermuda 2000 National Grid", 3771: "NAD27 / Alberta 3TM ref merid 111 W", 3772: "NAD27 / Alberta 3TM ref merid 114 W", 3773: "NAD27 / Alberta 3TM ref merid 117 
W", 3774: "NAD27 / Alberta 3TM ref merid 120 W", 3775: "NAD83 / Alberta 3TM ref merid 111 W", 3776: "NAD83 / Alberta 3TM ref merid 114 W", 3777: "NAD83 / Alberta 3TM ref merid 117 W", 3778: "NAD83 / Alberta 3TM ref merid 120 W", 3779: "NAD83(CSRS) / Alberta 3TM ref merid 111 W", 3780: "NAD83(CSRS) / Alberta 3TM ref merid 114 W", 3781: "NAD83(CSRS) / Alberta 3TM ref merid 117 W", 3782: "NAD83(CSRS) / Alberta 3TM ref merid 120 W", 3783: "Pitcairn 2006 / Pitcairn TM 2006", 3784: "Pitcairn 1967 / UTM zone 9S", 3785: "Popular Visualisation CRS / Mercator", 3786: "World Equidistant Cylindrical (Sphere)", 3787: "MGI / Slovene National Grid", 3788: "NZGD2000 / Auckland Islands TM 2000", 3789: "NZGD2000 / Campbell Island TM 2000", 3790: "NZGD2000 / Antipodes Islands TM 2000", 3791: "NZGD2000 / Raoul Island TM 2000", 3793: "NZGD2000 / Chatham Islands TM 2000", 3794: "Slovenia 1996 / Slovene National Grid", 3795: "NAD27 / Cuba Norte", 3796: "NAD27 / Cuba Sur", 3797: "NAD27 / MTQ Lambert", 3798: "NAD83 / MTQ Lambert", 3799: "NAD83(CSRS) / MTQ Lambert", 3800: "NAD27 / Alberta 3TM ref merid 120 W", 3801: "NAD83 / Alberta 3TM ref merid 120 W", 3802: "NAD83(CSRS) / Alberta 3TM ref merid 120 W", 3812: "ETRS89 / Belgian Lambert 2008", 3814: "NAD83 / Mississippi TM", 3815: "NAD83(HARN) / Mississippi TM", 3816: "NAD83(NSRS2007) / Mississippi TM", 3825: "TWD97 / TM2 zone 119", 3826: "TWD97 / TM2 zone 121", 3827: "TWD67 / TM2 zone 119", 3828: "TWD67 / TM2 zone 121", 3829: "Hu Tzu Shan 1950 / UTM zone 51N", 3832: "WGS 84 / PDC Mercator", 3833: "Pulkovo 1942(58) / Gauss-Kruger zone 2", 3834: "Pulkovo 1942(83) / Gauss-Kruger zone 2", 3835: "Pulkovo 1942(83) / Gauss-Kruger zone 3", 3836: "Pulkovo 1942(83) / Gauss-Kruger zone 4", 3837: "Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 3", 3838: "Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 4", 3839: "Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 9", 3840: "Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 10", 3841: "Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 6", 3842: "Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 7", 3843: "Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 8", 3844: "Pulkovo 1942(58) / Stereo70", 3845: "SWEREF99 / RT90 7.5 gon V emulation", 3846: "SWEREF99 / RT90 5 gon V emulation", 3847: "SWEREF99 / RT90 2.5 gon V emulation", 3848: "SWEREF99 / RT90 0 gon emulation", 3849: "SWEREF99 / RT90 2.5 gon O emulation", 3850: "SWEREF99 / RT90 5 gon O emulation", 3851: "NZGD2000 / NZCS2000", 3852: "RSRGD2000 / DGLC2000", 3854: "County ST74", 3857: "WGS 84 / Pseudo-Mercator", 3873: "ETRS89 / GK19FIN", 3874: "ETRS89 / GK20FIN", 3875: "ETRS89 / GK21FIN", 3876: "ETRS89 / GK22FIN", 3877: "ETRS89 / GK23FIN", 3878: "ETRS89 / GK24FIN", 3879: "ETRS89 / GK25FIN", 3880: "ETRS89 / GK26FIN", 3881: "ETRS89 / GK27FIN", 3882: "ETRS89 / GK28FIN", 3883: "ETRS89 / GK29FIN", 3884: "ETRS89 / GK30FIN", 3885: "ETRS89 / GK31FIN", 3890: "IGRS / UTM zone 37N", 3891: "IGRS / UTM zone 38N", 3892: "IGRS / UTM zone 39N", 3893: "ED50 / Iraq National Grid", 3907: "MGI 1901 / Balkans zone 5", 3908: "MGI 1901 / Balkans zone 6", 3909: "MGI 1901 / Balkans zone 7", 3910: "MGI 1901 / Balkans zone 8", 3911: "MGI 1901 / Slovenia Grid", 3912: "MGI 1901 / Slovene National Grid", 3920: "Puerto Rico / UTM zone 20N", 3942: "RGF93 / CC42", 3943: "RGF93 / CC43", 3944: "RGF93 / CC44", 3945: "RGF93 / CC45", 3946: "RGF93 / CC46", 3947: "RGF93 / CC47", 3948: "RGF93 / CC48", 3949: "RGF93 / CC49", 3950: "RGF93 / CC50", 3968: "NAD83 / Virginia Lambert", 3969: "NAD83(HARN) / Virginia Lambert", 3970: "NAD83(NSRS2007) / 
Virginia Lambert", 3973: "WGS 84 / NSIDC EASE-Grid North", 3974: "WGS 84 / NSIDC EASE-Grid South", 3975: "WGS 84 / NSIDC EASE-Grid Global", 3976: "WGS 84 / NSIDC Sea Ice Polar Stereographic South", 3978: "NAD83 / Canada Atlas Lambert", 3979: "NAD83(CSRS) / Canada Atlas Lambert", 3985: "Katanga 1955 / Katanga Lambert", 3986: "Katanga 1955 / Katanga Gauss zone A", 3987: "Katanga 1955 / Katanga Gauss zone B", 3988: "Katanga 1955 / Katanga Gauss zone C", 3989: "Katanga 1955 / Katanga Gauss zone D", 3991: "Puerto Rico State Plane CS of 1927", 3992: "Puerto Rico / St. Croix", 3993: "Guam 1963 / Guam SPCS", 3994: "WGS 84 / Mercator 41", 3995: "WGS 84 / Arctic Polar Stereographic", 3996: "WGS 84 / IBCAO Polar Stereographic", 3997: "WGS 84 / Dubai Local TM", 4026: "MOLDREF99 / Moldova TM", 4037: "WGS 84 / TMzn35N", 4038: "WGS 84 / TMzn36N", 4048: "RGRDC 2005 / Congo TM zone 12", 4049: "RGRDC 2005 / Congo TM zone 14", 4050: "RGRDC 2005 / Congo TM zone 16", 4051: "RGRDC 2005 / Congo TM zone 18", 4056: "RGRDC 2005 / Congo TM zone 20", 4057: "RGRDC 2005 / Congo TM zone 22", 4058: "RGRDC 2005 / Congo TM zone 24", 4059: "RGRDC 2005 / Congo TM zone 26", 4060: "RGRDC 2005 / Congo TM zone 28", 4061: "RGRDC 2005 / UTM zone 33S", 4062: "RGRDC 2005 / UTM zone 34S", 4063: "RGRDC 2005 / UTM zone 35S", 4071: "Chua / UTM zone 23S", 4082: "REGCAN95 / UTM zone 27N", 4083: "REGCAN95 / UTM zone 28N", 4087: "WGS 84 / World Equidistant Cylindrical", 4088: "World Equidistant Cylindrical (Sphere)", 4093: "ETRS89 / DKTM1", 4094: "ETRS89 / DKTM2", 4095: "ETRS89 / DKTM3", 4096: "ETRS89 / DKTM4", 4217: "NAD83 / BLM 59N (ftUS)", 4390: "Kertau 1968 / Johor Grid", 4391: "Kertau 1968 / Sembilan and Melaka Grid", 4392: "Kertau 1968 / Pahang Grid", 4393: "Kertau 1968 / Selangor Grid", 4394: "Kertau 1968 / Terengganu Grid", 4395: "Kertau 1968 / Pinang Grid", 4396: "Kertau 1968 / Kedah and Perlis Grid", 4397: "Kertau 1968 / Perak Revised Grid", 4398: "Kertau 1968 / Kelantan Grid", 4399: "NAD27 / BLM 59N (ftUS)", 4400: "NAD27 / BLM 60N (ftUS)", 4401: "NAD27 / BLM 1N (ftUS)", 4402: "NAD27 / BLM 2N (ftUS)", 4403: "NAD27 / BLM 3N (ftUS)", 4404: "NAD27 / BLM 4N (ftUS)", 4405: "NAD27 / BLM 5N (ftUS)", 4406: "NAD27 / BLM 6N (ftUS)", 4407: "NAD27 / BLM 7N (ftUS)", 4408: "NAD27 / BLM 8N (ftUS)", 4409: "NAD27 / BLM 9N (ftUS)", 4410: "NAD27 / BLM 10N (ftUS)", 4411: "NAD27 / BLM 11N (ftUS)", 4412: "NAD27 / BLM 12N (ftUS)", 4413: "NAD27 / BLM 13N (ftUS)", 4414: "NAD83(HARN) / Guam Map Grid", 4415: "Katanga 1955 / Katanga Lambert", 4417: "Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 7", 4418: "NAD27 / BLM 18N (ftUS)", 4419: "NAD27 / BLM 19N (ftUS)", 4420: "NAD83 / BLM 60N (ftUS)", 4421: "NAD83 / BLM 1N (ftUS)", 4422: "NAD83 / BLM 2N (ftUS)", 4423: "NAD83 / BLM 3N (ftUS)", 4424: "NAD83 / BLM 4N (ftUS)", 4425: "NAD83 / BLM 5N (ftUS)", 4426: "NAD83 / BLM 6N (ftUS)", 4427: "NAD83 / BLM 7N (ftUS)", 4428: "NAD83 / BLM 8N (ftUS)", 4429: "NAD83 / BLM 9N (ftUS)", 4430: "NAD83 / BLM 10N (ftUS)", 4431: "NAD83 / BLM 11N (ftUS)", 4432: "NAD83 / BLM 12N (ftUS)", 4433: "NAD83 / BLM 13N (ftUS)", 4434: "Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 8", 4437: "NAD83(NSRS2007) / Puerto Rico and Virgin Is.", 4438: "NAD83 / BLM 18N (ftUS)", 4439: "NAD83 / BLM 19N (ftUS)", 4455: "NAD27 / Pennsylvania South", 4456: "NAD27 / New York Long Island", 4457: "NAD83 / South Dakota North (ftUS)", 4462: "WGS 84 / Australian Centre for Remote Sensing Lambert", 4467: "RGSPM06 / UTM zone 21N", 4471: "RGM04 / UTM zone 38S", 4474: "Cadastre 1997 / UTM zone 38S", 4484: "Mexico ITRF92 
/ UTM zone 11N", 4485: "Mexico ITRF92 / UTM zone 12N", 4486: "Mexico ITRF92 / UTM zone 13N", 4487: "Mexico ITRF92 / UTM zone 14N", 4488: "Mexico ITRF92 / UTM zone 15N", 4489: "Mexico ITRF92 / UTM zone 16N", 4491: "CGCS2000 / Gauss-Kruger zone 13", 4492: "CGCS2000 / Gauss-Kruger zone 14", 4493: "CGCS2000 / Gauss-Kruger zone 15", 4494: "CGCS2000 / Gauss-Kruger zone 16", 4495: "CGCS2000 / Gauss-Kruger zone 17", 4496: "CGCS2000 / Gauss-Kruger zone 18", 4497: "CGCS2000 / Gauss-Kruger zone 19", 4498: "CGCS2000 / Gauss-Kruger zone 20", 4499: "CGCS2000 / Gauss-Kruger zone 21", 4500: "CGCS2000 / Gauss-Kruger zone 22", 4501: "CGCS2000 / Gauss-Kruger zone 23", 4502: "CGCS2000 / Gauss-Kruger CM 75E", 4503: "CGCS2000 / Gauss-Kruger CM 81E", 4504: "CGCS2000 / Gauss-Kruger CM 87E", 4505: "CGCS2000 / Gauss-Kruger CM 93E", 4506: "CGCS2000 / Gauss-Kruger CM 99E", 4507: "CGCS2000 / Gauss-Kruger CM 105E", 4508: "CGCS2000 / Gauss-Kruger CM 111E", 4509: "CGCS2000 / Gauss-Kruger CM 117E", 4510: "CGCS2000 / Gauss-Kruger CM 123E", 4511: "CGCS2000 / Gauss-Kruger CM 129E", 4512: "CGCS2000 / Gauss-Kruger CM 135E", 4513: "CGCS2000 / 3-degree Gauss-Kruger zone 25", 4514: "CGCS2000 / 3-degree Gauss-Kruger zone 26", 4515: "CGCS2000 / 3-degree Gauss-Kruger zone 27", 4516: "CGCS2000 / 3-degree Gauss-Kruger zone 28", 4517: "CGCS2000 / 3-degree Gauss-Kruger zone 29", 4518: "CGCS2000 / 3-degree Gauss-Kruger zone 30", 4519: "CGCS2000 / 3-degree Gauss-Kruger zone 31", 4520: "CGCS2000 / 3-degree Gauss-Kruger zone 32", 4521: "CGCS2000 / 3-degree Gauss-Kruger zone 33", 4522: "CGCS2000 / 3-degree Gauss-Kruger zone 34", 4523: "CGCS2000 / 3-degree Gauss-Kruger zone 35", 4524: "CGCS2000 / 3-degree Gauss-Kruger zone 36", 4525: "CGCS2000 / 3-degree Gauss-Kruger zone 37", 4526: "CGCS2000 / 3-degree Gauss-Kruger zone 38", 4527: "CGCS2000 / 3-degree Gauss-Kruger zone 39", 4528: "CGCS2000 / 3-degree Gauss-Kruger zone 40", 4529: "CGCS2000 / 3-degree Gauss-Kruger zone 41", 4530: "CGCS2000 / 3-degree Gauss-Kruger zone 42", 4531: "CGCS2000 / 3-degree Gauss-Kruger zone 43", 4532: "CGCS2000 / 3-degree Gauss-Kruger zone 44", 4533: "CGCS2000 / 3-degree Gauss-Kruger zone 45", 4534: "CGCS2000 / 3-degree Gauss-Kruger CM 75E", 4535: "CGCS2000 / 3-degree Gauss-Kruger CM 78E", 4536: "CGCS2000 / 3-degree Gauss-Kruger CM 81E", 4537: "CGCS2000 / 3-degree Gauss-Kruger CM 84E", 4538: "CGCS2000 / 3-degree Gauss-Kruger CM 87E", 4539: "CGCS2000 / 3-degree Gauss-Kruger CM 90E", 4540: "CGCS2000 / 3-degree Gauss-Kruger CM 93E", 4541: "CGCS2000 / 3-degree Gauss-Kruger CM 96E", 4542: "CGCS2000 / 3-degree Gauss-Kruger CM 99E", 4543: "CGCS2000 / 3-degree Gauss-Kruger CM 102E", 4544: "CGCS2000 / 3-degree Gauss-Kruger CM 105E", 4545: "CGCS2000 / 3-degree Gauss-Kruger CM 108E", 4546: "CGCS2000 / 3-degree Gauss-Kruger CM 111E", 4547: "CGCS2000 / 3-degree Gauss-Kruger CM 114E", 4548: "CGCS2000 / 3-degree Gauss-Kruger CM 117E", 4549: "CGCS2000 / 3-degree Gauss-Kruger CM 120E", 4550: "CGCS2000 / 3-degree Gauss-Kruger CM 123E", 4551: "CGCS2000 / 3-degree Gauss-Kruger CM 126E", 4552: "CGCS2000 / 3-degree Gauss-Kruger CM 129E", 4553: "CGCS2000 / 3-degree Gauss-Kruger CM 132E", 4554: "CGCS2000 / 3-degree Gauss-Kruger CM 135E", 4559: "RRAF 1991 / UTM zone 20N", 4568: "New Beijing / Gauss-Kruger zone 13", 4569: "New Beijing / Gauss-Kruger zone 14", 4570: "New Beijing / Gauss-Kruger zone 15", 4571: "New Beijing / Gauss-Kruger zone 16", 4572: "New Beijing / Gauss-Kruger zone 17", 4573: "New Beijing / Gauss-Kruger zone 18", 4574: "New Beijing / Gauss-Kruger zone 19", 4575: "New 
Beijing / Gauss-Kruger zone 20", 4576: "New Beijing / Gauss-Kruger zone 21", 4577: "New Beijing / Gauss-Kruger zone 22", 4578: "New Beijing / Gauss-Kruger zone 23", 4579: "New Beijing / Gauss-Kruger CM 75E", 4580: "New Beijing / Gauss-Kruger CM 81E", 4581: "New Beijing / Gauss-Kruger CM 87E", 4582: "New Beijing / Gauss-Kruger CM 93E", 4583: "New Beijing / Gauss-Kruger CM 99E", 4584: "New Beijing / Gauss-Kruger CM 105E", 4585: "New Beijing / Gauss-Kruger CM 111E", 4586: "New Beijing / Gauss-Kruger CM 117E", 4587: "New Beijing / Gauss-Kruger CM 123E", 4588: "New Beijing / Gauss-Kruger CM 129E", 4589: "New Beijing / Gauss-Kruger CM 135E", 4647: "ETRS89 / UTM zone 32N (zE-N)", 4652: "New Beijing / 3-degree Gauss-Kruger zone 25", 4653: "New Beijing / 3-degree Gauss-Kruger zone 26", 4654: "New Beijing / 3-degree Gauss-Kruger zone 27", 4655: "New Beijing / 3-degree Gauss-Kruger zone 28", 4656: "New Beijing / 3-degree Gauss-Kruger zone 29", 4766: "New Beijing / 3-degree Gauss-Kruger zone 30", 4767: "New Beijing / 3-degree Gauss-Kruger zone 31", 4768: "New Beijing / 3-degree Gauss-Kruger zone 32", 4769: "New Beijing / 3-degree Gauss-Kruger zone 33", 4770: "New Beijing / 3-degree Gauss-Kruger zone 34", 4771: "New Beijing / 3-degree Gauss-Kruger zone 35", 4772: "New Beijing / 3-degree Gauss-Kruger zone 36", 4773: "New Beijing / 3-degree Gauss-Kruger zone 37", 4774: "New Beijing / 3-degree Gauss-Kruger zone 38", 4775: "New Beijing / 3-degree Gauss-Kruger zone 39", 4776: "New Beijing / 3-degree Gauss-Kruger zone 40", 4777: "New Beijing / 3-degree Gauss-Kruger zone 41", 4778: "New Beijing / 3-degree Gauss-Kruger zone 42", 4779: "New Beijing / 3-degree Gauss-Kruger zone 43", 4780: "New Beijing / 3-degree Gauss-Kruger zone 44", 4781: "New Beijing / 3-degree Gauss-Kruger zone 45", 4782: "New Beijing / 3-degree Gauss-Kruger CM 75E", 4783: "New Beijing / 3-degree Gauss-Kruger CM 78E", 4784: "New Beijing / 3-degree Gauss-Kruger CM 81E", 4785: "New Beijing / 3-degree Gauss-Kruger CM 84E", 4786: "New Beijing / 3-degree Gauss-Kruger CM 87E", 4787: "New Beijing / 3-degree Gauss-Kruger CM 90E", 4788: "New Beijing / 3-degree Gauss-Kruger CM 93E", 4789: "New Beijing / 3-degree Gauss-Kruger CM 96E", 4790: "New Beijing / 3-degree Gauss-Kruger CM 99E", 4791: "New Beijing / 3-degree Gauss-Kruger CM 102E", 4792: "New Beijing / 3-degree Gauss-Kruger CM 105E", 4793: "New Beijing / 3-degree Gauss-Kruger CM 108E", 4794: "New Beijing / 3-degree Gauss-Kruger CM 111E", 4795: "New Beijing / 3-degree Gauss-Kruger CM 114E", 4796: "New Beijing / 3-degree Gauss-Kruger CM 117E", 4797: "New Beijing / 3-degree Gauss-Kruger CM 120E", 4798: "New Beijing / 3-degree Gauss-Kruger CM 123E", 4799: "New Beijing / 3-degree Gauss-Kruger CM 126E", 4800: "New Beijing / 3-degree Gauss-Kruger CM 129E", 4812: "New Beijing / 3-degree Gauss-Kruger CM 132E", 4822: "New Beijing / 3-degree Gauss-Kruger CM 135E", 4826: "WGS 84 / Cape Verde National", 4839: "ETRS89 / LCC Germany (N-E)", 4855: "ETRS89 / NTM zone 5", 4856: "ETRS89 / NTM zone 6", 4857: "ETRS89 / NTM zone 7", 4858: "ETRS89 / NTM zone 8", 4859: "ETRS89 / NTM zone 9", 4860: "ETRS89 / NTM zone 10", 4861: "ETRS89 / NTM zone 11", 4862: "ETRS89 / NTM zone 12", 4863: "ETRS89 / NTM zone 13", 4864: "ETRS89 / NTM zone 14", 4865: "ETRS89 / NTM zone 15", 4866: "ETRS89 / NTM zone 16", 4867: "ETRS89 / NTM zone 17", 4868: "ETRS89 / NTM zone 18", 4869: "ETRS89 / NTM zone 19", 4870: "ETRS89 / NTM zone 20", 4871: "ETRS89 / NTM zone 21", 4872: "ETRS89 / NTM zone 22", 4873: "ETRS89 / NTM zone 23", 4874: "ETRS89 / 
NTM zone 24", 4875: "ETRS89 / NTM zone 25", 4876: "ETRS89 / NTM zone 26", 4877: "ETRS89 / NTM zone 27", 4878: "ETRS89 / NTM zone 28", 4879: "ETRS89 / NTM zone 29", 4880: "ETRS89 / NTM zone 30", 5014: "PTRA08 / UTM zone 25N", 5015: "PTRA08 / UTM zone 26N", 5016: "PTRA08 / UTM zone 28N", 5017: "Lisbon 1890 / Portugal Bonne New", 5018: "Lisbon / Portuguese Grid New", 5041: "WGS 84 / UPS North (E,N)", 5042: "WGS 84 / UPS South (E,N)", 5048: "ETRS89 / TM35FIN(N,E)", 5069: "NAD27 / Conus Albers", 5070: "NAD83 / Conus Albers", 5071: "NAD83(HARN) / Conus Albers", 5072: "NAD83(NSRS2007) / Conus Albers", 5105: "ETRS89 / NTM zone 5", 5106: "ETRS89 / NTM zone 6", 5107: "ETRS89 / NTM zone 7", 5108: "ETRS89 / NTM zone 8", 5109: "ETRS89 / NTM zone 9", 5110: "ETRS89 / NTM zone 10", 5111: "ETRS89 / NTM zone 11", 5112: "ETRS89 / NTM zone 12", 5113: "ETRS89 / NTM zone 13", 5114: "ETRS89 / NTM zone 14", 5115: "ETRS89 / NTM zone 15", 5116: "ETRS89 / NTM zone 16", 5117: "ETRS89 / NTM zone 17", 5118: "ETRS89 / NTM zone 18", 5119: "ETRS89 / NTM zone 19", 5120: "ETRS89 / NTM zone 20", 5121: "ETRS89 / NTM zone 21", 5122: "ETRS89 / NTM zone 22", 5123: "ETRS89 / NTM zone 23", 5124: "ETRS89 / NTM zone 24", 5125: "ETRS89 / NTM zone 25", 5126: "ETRS89 / NTM zone 26", 5127: "ETRS89 / NTM zone 27", 5128: "ETRS89 / NTM zone 28", 5129: "ETRS89 / NTM zone 29", 5130: "ETRS89 / NTM zone 30", 5167: "Korean 1985 / East Sea Belt", 5168: "Korean 1985 / Central Belt Jeju", 5169: "Tokyo 1892 / Korea West Belt", 5170: "Tokyo 1892 / Korea Central Belt", 5171: "Tokyo 1892 / Korea East Belt", 5172: "Tokyo 1892 / Korea East Sea Belt", 5173: "Korean 1985 / Modified West Belt", 5174: "Korean 1985 / Modified Central Belt", 5175: "Korean 1985 / Modified Central Belt Jeju", 5176: "Korean 1985 / Modified East Belt", 5177: "Korean 1985 / Modified East Sea Belt", 5178: "Korean 1985 / Unified CS", 5179: "Korea 2000 / Unified CS", 5180: "Korea 2000 / West Belt", 5181: "Korea 2000 / Central Belt", 5182: "Korea 2000 / Central Belt Jeju", 5183: "Korea 2000 / East Belt", 5184: "Korea 2000 / East Sea Belt", 5185: "Korea 2000 / West Belt 2010", 5186: "Korea 2000 / Central Belt 2010", 5187: "Korea 2000 / East Belt 2010", 5188: "Korea 2000 / East Sea Belt 2010", 5221: "S-JTSK (Ferro) / Krovak East North", 5223: "WGS 84 / Gabon TM", 5224: "S-JTSK/05 (Ferro) / Modified Krovak", 5225: "S-JTSK/05 (Ferro) / Modified Krovak East North", 5234: "Kandawala / Sri Lanka Grid", 5235: "SLD99 / Sri Lanka Grid 1999", 5243: "ETRS89 / LCC Germany (E-N)", 5247: "GDBD2009 / Brunei BRSO", 5253: "TUREF / TM27", 5254: "TUREF / TM30", 5255: "TUREF / TM33", 5256: "TUREF / TM36", 5257: "TUREF / TM39", 5258: "TUREF / TM42", 5259: "TUREF / TM45", 5266: "DRUKREF 03 / Bhutan National Grid", 5269: "TUREF / 3-degree Gauss-Kruger zone 9", 5270: "TUREF / 3-degree Gauss-Kruger zone 10", 5271: "TUREF / 3-degree Gauss-Kruger zone 11", 5272: "TUREF / 3-degree Gauss-Kruger zone 12", 5273: "TUREF / 3-degree Gauss-Kruger zone 13", 5274: "TUREF / 3-degree Gauss-Kruger zone 14", 5275: "TUREF / 3-degree Gauss-Kruger zone 15", 5292: "DRUKREF 03 / Bumthang TM", 5293: "DRUKREF 03 / Chhukha TM", 5294: "DRUKREF 03 / Dagana TM", 5295: "DRUKREF 03 / Gasa TM", 5296: "DRUKREF 03 / Ha TM", 5297: "DRUKREF 03 / Lhuentse TM", 5298: "DRUKREF 03 / Mongar TM", 5299: "DRUKREF 03 / Paro TM", 5300: "DRUKREF 03 / Pemagatshel TM", 5301: "DRUKREF 03 / Punakha TM", 5302: "DRUKREF 03 / Samdrup Jongkhar TM", 5303: "DRUKREF 03 / Samtse TM", 5304: "DRUKREF 03 / Sarpang TM", 5305: "DRUKREF 03 / Thimphu TM", 5306: "DRUKREF 03 
/ Trashigang TM", 5307: "DRUKREF 03 / Trongsa TM", 5308: "DRUKREF 03 / Tsirang TM", 5309: "DRUKREF 03 / Wangdue Phodrang TM", 5310: "DRUKREF 03 / Yangtse TM", 5311: "DRUKREF 03 / Zhemgang TM", 5316: "ETRS89 / Faroe TM", 5320: "NAD83 / Teranet Ontario Lambert", 5321: "NAD83(CSRS) / Teranet Ontario Lambert", 5325: "ISN2004 / Lambert 2004", 5329: "Segara (Jakarta) / NEIEZ", 5330: "Batavia (Jakarta) / NEIEZ", 5331: "Makassar (Jakarta) / NEIEZ", 5337: "Aratu / UTM zone 25S", 5343: "POSGAR 2007 / Argentina 1", 5344: "POSGAR 2007 / Argentina 2", 5345: "POSGAR 2007 / Argentina 3", 5346: "POSGAR 2007 / Argentina 4", 5347: "POSGAR 2007 / Argentina 5", 5348: "POSGAR 2007 / Argentina 6", 5349: "POSGAR 2007 / Argentina 7", 5355: "MARGEN / UTM zone 20S", 5356: "MARGEN / UTM zone 19S", 5357: "MARGEN / UTM zone 21S", 5361: "SIRGAS-Chile / UTM zone 19S", 5362: "SIRGAS-Chile / UTM zone 18S", 5367: "CR05 / CRTM05", 5382: "SIRGAS-ROU98 / UTM zone 21S", 5383: "SIRGAS-ROU98 / UTM zone 22S", 5387: "Peru96 / UTM zone 18S", 5388: "Peru96 / UTM zone 17S", 5389: "Peru96 / UTM zone 19S", 5396: "SIRGAS 2000 / UTM zone 26S", 5456: "Ocotepeque 1935 / Costa Rica Norte", 5457: "Ocotepeque 1935 / Costa Rica Sur", 5458: "Ocotepeque 1935 / Guatemala Norte", 5459: "Ocotepeque 1935 / Guatemala Sur", 5460: "Ocotepeque 1935 / El Salvador Lambert", 5461: "Ocotepeque 1935 / Nicaragua Norte", 5462: "Ocotepeque 1935 / Nicaragua Sur", 5463: "SAD69 / UTM zone 17N", 5466: "Sibun Gorge 1922 / Colony Grid", 5469: "Panama-Colon 1911 / Panama Lambert", 5472: "Panama-Colon 1911 / Panama Polyconic", 5479: "RSRGD2000 / MSLC2000", 5480: "RSRGD2000 / BCLC2000", 5481: "RSRGD2000 / PCLC2000", 5482: "RSRGD2000 / RSPS2000", 5490: "RGAF09 / UTM zone 20N", 5513: "S-JTSK / Krovak", 5514: "S-JTSK / Krovak East North", 5515: "S-JTSK/05 / Modified Krovak", 5516: "S-JTSK/05 / Modified Krovak East North", 5518: "CI1971 / Chatham Islands Map Grid", 5519: "CI1979 / Chatham Islands Map Grid", 5520: "DHDN / 3-degree Gauss-Kruger zone 1", 5523: "WGS 84 / Gabon TM 2011", 5530: "SAD69(96) / Brazil Polyconic", 5531: "SAD69(96) / UTM zone 21S", 5532: "SAD69(96) / UTM zone 22S", 5533: "SAD69(96) / UTM zone 23S", 5534: "SAD69(96) / UTM zone 24S", 5535: "SAD69(96) / UTM zone 25S", 5536: "Corrego Alegre 1961 / UTM zone 21S", 5537: "Corrego Alegre 1961 / UTM zone 22S", 5538: "Corrego Alegre 1961 / UTM zone 23S", 5539: "Corrego Alegre 1961 / UTM zone 24S", 5550: "PNG94 / PNGMG94 zone 54", 5551: "PNG94 / PNGMG94 zone 55", 5552: "PNG94 / PNGMG94 zone 56", 5559: "Ocotepeque 1935 / Guatemala Norte", 5562: "UCS-2000 / Gauss-Kruger zone 4", 5563: "UCS-2000 / Gauss-Kruger zone 5", 5564: "UCS-2000 / Gauss-Kruger zone 6", 5565: "UCS-2000 / Gauss-Kruger zone 7", 5566: "UCS-2000 / Gauss-Kruger CM 21E", 5567: "UCS-2000 / Gauss-Kruger CM 27E", 5568: "UCS-2000 / Gauss-Kruger CM 33E", 5569: "UCS-2000 / Gauss-Kruger CM 39E", 5570: "UCS-2000 / 3-degree Gauss-Kruger zone 7", 5571: "UCS-2000 / 3-degree Gauss-Kruger zone 8", 5572: "UCS-2000 / 3-degree Gauss-Kruger zone 9", 5573: "UCS-2000 / 3-degree Gauss-Kruger zone 10", 5574: "UCS-2000 / 3-degree Gauss-Kruger zone 11", 5575: "UCS-2000 / 3-degree Gauss-Kruger zone 12", 5576: "UCS-2000 / 3-degree Gauss-Kruger zone 13", 5577: "UCS-2000 / 3-degree Gauss-Kruger CM 21E", 5578: "UCS-2000 / 3-degree Gauss-Kruger CM 24E", 5579: "UCS-2000 / 3-degree Gauss-Kruger CM 27E", 5580: "UCS-2000 / 3-degree Gauss-Kruger CM 30E", 5581: "UCS-2000 / 3-degree Gauss-Kruger CM 33E", 5582: "UCS-2000 / 3-degree Gauss-Kruger CM 36E", 5583: "UCS-2000 / 3-degree 
Gauss-Kruger CM 39E", 5588: "NAD27 / New Brunswick Stereographic (NAD27)", 5589: "Sibun Gorge 1922 / Colony Grid", 5596: "FEH2010 / Fehmarnbelt TM", 5623: "NAD27 / Michigan East", 5624: "NAD27 / Michigan Old Central", 5625: "NAD27 / Michigan West", 5627: "ED50 / TM 6 NE", 5629: "Moznet / UTM zone 38S", 5631: "Pulkovo 1942(58) / Gauss-Kruger zone 2 (E-N)", 5632: "PTRA08 / LCC Europe", 5633: "PTRA08 / LAEA Europe", 5634: "REGCAN95 / LCC Europe", 5635: "REGCAN95 / LAEA Europe", 5636: "TUREF / LAEA Europe", 5637: "TUREF / LCC Europe", 5638: "ISN2004 / LAEA Europe", 5639: "ISN2004 / LCC Europe", 5641: "SIRGAS 2000 / Brazil Mercator", 5643: "ED50 / SPBA LCC", 5644: "RGR92 / UTM zone 39S", 5646: "NAD83 / Vermont (ftUS)", 5649: "ETRS89 / UTM zone 31N (zE-N)", 5650: "ETRS89 / UTM zone 33N (zE-N)", 5651: "ETRS89 / UTM zone 31N (N-zE)", 5652: "ETRS89 / UTM zone 32N (N-zE)", 5653: "ETRS89 / UTM zone 33N (N-zE)", 5654: "NAD83(HARN) / Vermont (ftUS)", 5655: "NAD83(NSRS2007) / Vermont (ftUS)", 5659: "Monte Mario / TM Emilia-Romagna", 5663: "Pulkovo 1942(58) / Gauss-Kruger zone 3 (E-N)", 5664: "Pulkovo 1942(83) / Gauss-Kruger zone 2 (E-N)", 5665: "Pulkovo 1942(83) / Gauss-Kruger zone 3 (E-N)", 5666: "PD/83 / 3-degree Gauss-Kruger zone 3 (E-N)", 5667: "PD/83 / 3-degree Gauss-Kruger zone 4 (E-N)", 5668: "RD/83 / 3-degree Gauss-Kruger zone 4 (E-N)", 5669: "RD/83 / 3-degree Gauss-Kruger zone 5 (E-N)", 5670: "Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 3 (E-N)", 5671: "Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 4 (E-N)", 5672: "Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 5 (E-N)", 5673: "Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 3 (E-N)", 5674: "Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 4 (E-N)", 5675: "Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 5 (E-N)", 5676: "DHDN / 3-degree Gauss-Kruger zone 2 (E-N)", 5677: "DHDN / 3-degree Gauss-Kruger zone 3 (E-N)", 5678: "DHDN / 3-degree Gauss-Kruger zone 4 (E-N)", 5679: "DHDN / 3-degree Gauss-Kruger zone 5 (E-N)", 5680: "DHDN / 3-degree Gauss-Kruger zone 1 (E-N)", 5682: "DB_REF / 3-degree Gauss-Kruger zone 2 (E-N)", 5683: "DB_REF / 3-degree Gauss-Kruger zone 3 (E-N)", 5684: "DB_REF / 3-degree Gauss-Kruger zone 4 (E-N)", 5685: "DB_REF / 3-degree Gauss-Kruger zone 5 (E-N)", 5700: "NZGD2000 / UTM zone 1S", 5819: "EPSG topocentric example A", 5820: "EPSG topocentric example B", 5821: "EPSG vertical perspective example", 5825: "AGD66 / ACT Standard Grid", 5836: "Yemen NGN96 / UTM zone 37N", 5837: "Yemen NGN96 / UTM zone 40N", 5839: "Peru96 / UTM zone 17S", 5842: "WGS 84 / TM 12 SE", 5844: "RGRDC 2005 / Congo TM zone 30", 5858: "SAD69(96) / UTM zone 22S", 5875: "SAD69(96) / UTM zone 18S", 5876: "SAD69(96) / UTM zone 19S", 5877: "SAD69(96) / UTM zone 20S", 5879: "Cadastre 1997 / UTM zone 38S", 5880: "SIRGAS 2000 / Brazil Polyconic", 5887: "TGD2005 / Tonga Map Grid", 5890: "JAXA Snow Depth Polar Stereographic North", 5921: "WGS 84 / EPSG Arctic Regional zone A1", 5922: "WGS 84 / EPSG Arctic Regional zone A2", 5923: "WGS 84 / EPSG Arctic Regional zone A3", 5924: "WGS 84 / EPSG Arctic Regional zone A4", 5925: "WGS 84 / EPSG Arctic Regional zone A5", 5926: "WGS 84 / EPSG Arctic Regional zone B1", 5927: "WGS 84 / EPSG Arctic Regional zone B2", 5928: "WGS 84 / EPSG Arctic Regional zone B3", 5929: "WGS 84 / EPSG Arctic Regional zone B4", 5930: "WGS 84 / EPSG Arctic Regional zone B5", 5931: "WGS 84 / EPSG Arctic Regional zone C1", 5932: "WGS 84 / EPSG Arctic Regional zone C2", 5933: "WGS 84 / EPSG Arctic Regional zone C3", 5934: "WGS 84 / EPSG Arctic Regional zone 
C4", 5935: "WGS 84 / EPSG Arctic Regional zone C5", 5936: "WGS 84 / EPSG Alaska Polar Stereographic", 5937: "WGS 84 / EPSG Canada Polar Stereographic", 5938: "WGS 84 / EPSG Greenland Polar Stereographic", 5939: "WGS 84 / EPSG Norway Polar Stereographic", 5940: "WGS 84 / EPSG Russia Polar Stereographic", 6050: "GR96 / EPSG Arctic zone 1-25", 6051: "GR96 / EPSG Arctic zone 2-18", 6052: "GR96 / EPSG Arctic zone 2-20", 6053: "GR96 / EPSG Arctic zone 3-29", 6054: "GR96 / EPSG Arctic zone 3-31", 6055: "GR96 / EPSG Arctic zone 3-33", 6056: "GR96 / EPSG Arctic zone 4-20", 6057: "GR96 / EPSG Arctic zone 4-22", 6058: "GR96 / EPSG Arctic zone 4-24", 6059: "GR96 / EPSG Arctic zone 5-41", 6060: "GR96 / EPSG Arctic zone 5-43", 6061: "GR96 / EPSG Arctic zone 5-45", 6062: "GR96 / EPSG Arctic zone 6-26", 6063: "GR96 / EPSG Arctic zone 6-28", 6064: "GR96 / EPSG Arctic zone 6-30", 6065: "GR96 / EPSG Arctic zone 7-11", 6066: "GR96 / EPSG Arctic zone 7-13", 6067: "GR96 / EPSG Arctic zone 8-20", 6068: "GR96 / EPSG Arctic zone 8-22", 6069: "ETRS89 / EPSG Arctic zone 2-22", 6070: "ETRS89 / EPSG Arctic zone 3-11", 6071: "ETRS89 / EPSG Arctic zone 4-26", 6072: "ETRS89 / EPSG Arctic zone 4-28", 6073: "ETRS89 / EPSG Arctic zone 5-11", 6074: "ETRS89 / EPSG Arctic zone 5-13", 6075: "WGS 84 / EPSG Arctic zone 2-24", 6076: "WGS 84 / EPSG Arctic zone 2-26", 6077: "WGS 84 / EPSG Arctic zone 3-13", 6078: "WGS 84 / EPSG Arctic zone 3-15", 6079: "WGS 84 / EPSG Arctic zone 3-17", 6080: "WGS 84 / EPSG Arctic zone 3-19", 6081: "WGS 84 / EPSG Arctic zone 4-30", 6082: "WGS 84 / EPSG Arctic zone 4-32", 6083: "WGS 84 / EPSG Arctic zone 4-34", 6084: "WGS 84 / EPSG Arctic zone 4-36", 6085: "WGS 84 / EPSG Arctic zone 4-38", 6086: "WGS 84 / EPSG Arctic zone 4-40", 6087: "WGS 84 / EPSG Arctic zone 5-15", 6088: "WGS 84 / EPSG Arctic zone 5-17", 6089: "WGS 84 / EPSG Arctic zone 5-19", 6090: "WGS 84 / EPSG Arctic zone 5-21", 6091: "WGS 84 / EPSG Arctic zone 5-23", 6092: "WGS 84 / EPSG Arctic zone 5-25", 6093: "WGS 84 / EPSG Arctic zone 5-27", 6094: "NAD83(NSRS2007) / EPSG Arctic zone 5-29", 6095: "NAD83(NSRS2007) / EPSG Arctic zone 5-31", 6096: "NAD83(NSRS2007) / EPSG Arctic zone 6-14", 6097: "NAD83(NSRS2007) / EPSG Arctic zone 6-16", 6098: "NAD83(CSRS) / EPSG Arctic zone 1-23", 6099: "NAD83(CSRS) / EPSG Arctic zone 2-14", 6100: "NAD83(CSRS) / EPSG Arctic zone 2-16", 6101: "NAD83(CSRS) / EPSG Arctic zone 3-25", 6102: "NAD83(CSRS) / EPSG Arctic zone 3-27", 6103: "NAD83(CSRS) / EPSG Arctic zone 3-29", 6104: "NAD83(CSRS) / EPSG Arctic zone 4-14", 6105: "NAD83(CSRS) / EPSG Arctic zone 4-16", 6106: "NAD83(CSRS) / EPSG Arctic zone 4-18", 6107: "NAD83(CSRS) / EPSG Arctic zone 5-33", 6108: "NAD83(CSRS) / EPSG Arctic zone 5-35", 6109: "NAD83(CSRS) / EPSG Arctic zone 5-37", 6110: "NAD83(CSRS) / EPSG Arctic zone 5-39", 6111: "NAD83(CSRS) / EPSG Arctic zone 6-18", 6112: "NAD83(CSRS) / EPSG Arctic zone 6-20", 6113: "NAD83(CSRS) / EPSG Arctic zone 6-22", 6114: "NAD83(CSRS) / EPSG Arctic zone 6-24", 6115: "WGS 84 / EPSG Arctic zone 1-27", 6116: "WGS 84 / EPSG Arctic zone 1-29", 6117: "WGS 84 / EPSG Arctic zone 1-31", 6118: "WGS 84 / EPSG Arctic zone 1-21", 6119: "WGS 84 / EPSG Arctic zone 2-28", 6120: "WGS 84 / EPSG Arctic zone 2-10", 6121: "WGS 84 / EPSG Arctic zone 2-12", 6122: "WGS 84 / EPSG Arctic zone 3-21", 6123: "WGS 84 / EPSG Arctic zone 3-23", 6124: "WGS 84 / EPSG Arctic zone 4-12", 6125: "ETRS89 / EPSG Arctic zone 5-47", 6128: "Grand Cayman National Grid 1959", 6129: "Sister Islands National Grid 1961", 6141: "Cayman Islands National Grid 2011", 
6200: "NAD27 / Michigan North", 6201: "NAD27 / Michigan Central", 6202: "NAD27 / Michigan South", 6204: "Macedonia State Coordinate System", 6210: "SIRGAS 2000 / UTM zone 23N", 6211: "SIRGAS 2000 / UTM zone 24N", 6244: "MAGNA-SIRGAS / Arauca urban grid", 6245: "MAGNA-SIRGAS / Armenia urban grid", 6246: "MAGNA-SIRGAS / Barranquilla urban grid", 6247: "MAGNA-SIRGAS / Bogota urban grid", 6248: "MAGNA-SIRGAS / Bucaramanga urban grid", 6249: "MAGNA-SIRGAS / Cali urban grid", 6250: "MAGNA-SIRGAS / Cartagena urban grid", 6251: "MAGNA-SIRGAS / Cucuta urban grid", 6252: "MAGNA-SIRGAS / Florencia urban grid", 6253: "MAGNA-SIRGAS / Ibague urban grid", 6254: "MAGNA-SIRGAS / Inirida urban grid", 6255: "MAGNA-SIRGAS / Leticia urban grid", 6256: "MAGNA-SIRGAS / Manizales urban grid", 6257: "MAGNA-SIRGAS / Medellin urban grid", 6258: "MAGNA-SIRGAS / Mitu urban grid", 6259: "MAGNA-SIRGAS / Mocoa urban grid", 6260: "MAGNA-SIRGAS / Monteria urban grid", 6261: "MAGNA-SIRGAS / Neiva urban grid", 6262: "MAGNA-SIRGAS / Pasto urban grid", 6263: "MAGNA-SIRGAS / Pereira urban grid", 6264: "MAGNA-SIRGAS / Popayan urban grid", 6265: "MAGNA-SIRGAS / Puerto Carreno urban grid", 6266: "MAGNA-SIRGAS / Quibdo urban grid", 6267: "MAGNA-SIRGAS / Riohacha urban grid", 6268: "MAGNA-SIRGAS / San Andres urban grid", 6269: "MAGNA-SIRGAS / San Jose del Guaviare urban grid", 6270: "MAGNA-SIRGAS / Santa Marta urban grid", 6271: "MAGNA-SIRGAS / Sucre urban grid", 6272: "MAGNA-SIRGAS / Tunja urban grid", 6273: "MAGNA-SIRGAS / Valledupar urban grid", 6274: "MAGNA-SIRGAS / Villavicencio urban grid", 6275: "MAGNA-SIRGAS / Yopal urban grid", 6307: "NAD83(CORS96) / Puerto Rico and Virgin Is.", 6312: "CGRS93 / Cyprus Local Transverse Mercator", 6316: "Macedonia State Coordinate System zone 7", 6328: "NAD83(2011) / UTM zone 59N", 6329: "NAD83(2011) / UTM zone 60N", 6330: "NAD83(2011) / UTM zone 1N", 6331: "NAD83(2011) / UTM zone 2N", 6332: "NAD83(2011) / UTM zone 3N", 6333: "NAD83(2011) / UTM zone 4N", 6334: "NAD83(2011) / UTM zone 5N", 6335: "NAD83(2011) / UTM zone 6N", 6336: "NAD83(2011) / UTM zone 7N", 6337: "NAD83(2011) / UTM zone 8N", 6338: "NAD83(2011) / UTM zone 9N", 6339: "NAD83(2011) / UTM zone 10N", 6340: "NAD83(2011) / UTM zone 11N", 6341: "NAD83(2011) / UTM zone 12N", 6342: "NAD83(2011) / UTM zone 13N", 6343: "NAD83(2011) / UTM zone 14N", 6344: "NAD83(2011) / UTM zone 15N", 6345: "NAD83(2011) / UTM zone 16N", 6346: "NAD83(2011) / UTM zone 17N", 6347: "NAD83(2011) / UTM zone 18N", 6348: "NAD83(2011) / UTM zone 19N", 6350: "NAD83(2011) / Conus Albers", 6351: "NAD83(2011) / EPSG Arctic zone 5-29", 6352: "NAD83(2011) / EPSG Arctic zone 5-31", 6353: "NAD83(2011) / EPSG Arctic zone 6-14", 6354: "NAD83(2011) / EPSG Arctic zone 6-16", 6355: "NAD83(2011) / Alabama East", 6356: "NAD83(2011) / Alabama West", 6362: "Mexico ITRF92 / LCC", 6366: "Mexico ITRF2008 / UTM zone 11N", 6367: "Mexico ITRF2008 / UTM zone 12N", 6368: "Mexico ITRF2008 / UTM zone 13N", 6369: "Mexico ITRF2008 / UTM zone 14N", 6370: "Mexico ITRF2008 / UTM zone 15N", 6371: "Mexico ITRF2008 / UTM zone 16N", 6372: "Mexico ITRF2008 / LCC", 6381: "UCS-2000 / Ukraine TM zone 7", 6382: "UCS-2000 / Ukraine TM zone 8", 6383: "UCS-2000 / Ukraine TM zone 9", 6384: "UCS-2000 / Ukraine TM zone 10", 6385: "UCS-2000 / Ukraine TM zone 11", 6386: "UCS-2000 / Ukraine TM zone 12", 6387: "UCS-2000 / Ukraine TM zone 13", 6391: "Cayman Islands National Grid 2011", 6393: "NAD83(2011) / Alaska Albers", 6394: "NAD83(2011) / Alaska zone 1", 6395: "NAD83(2011) / Alaska zone 2", 6396: "NAD83(2011) / 
Alaska zone 3", 6397: "NAD83(2011) / Alaska zone 4", 6398: "NAD83(2011) / Alaska zone 5", 6399: "NAD83(2011) / Alaska zone 6", 6400: "NAD83(2011) / Alaska zone 7", 6401: "NAD83(2011) / Alaska zone 8", 6402: "NAD83(2011) / Alaska zone 9", 6403: "NAD83(2011) / Alaska zone 10", 6404: "NAD83(2011) / Arizona Central", 6405: "NAD83(2011) / Arizona Central (ft)", 6406: "NAD83(2011) / Arizona East", 6407: "NAD83(2011) / Arizona East (ft)", 6408: "NAD83(2011) / Arizona West", 6409: "NAD83(2011) / Arizona West (ft)", 6410: "NAD83(2011) / Arkansas North", 6411: "NAD83(2011) / Arkansas North (ftUS)", 6412: "NAD83(2011) / Arkansas South", 6413: "NAD83(2011) / Arkansas South (ftUS)", 6414: "NAD83(2011) / California Albers", 6415: "NAD83(2011) / California zone 1", 6416: "NAD83(2011) / California zone 1 (ftUS)", 6417: "NAD83(2011) / California zone 2", 6418: "NAD83(2011) / California zone 2 (ftUS)", 6419: "NAD83(2011) / California zone 3", 6420: "NAD83(2011) / California zone 3 (ftUS)", 6421: "NAD83(2011) / California zone 4", 6422: "NAD83(2011) / California zone 4 (ftUS)", 6423: "NAD83(2011) / California zone 5", 6424: "NAD83(2011) / California zone 5 (ftUS)", 6425: "NAD83(2011) / California zone 6", 6426: "NAD83(2011) / California zone 6 (ftUS)", 6427: "NAD83(2011) / Colorado Central", 6428: "NAD83(2011) / Colorado Central (ftUS)", 6429: "NAD83(2011) / Colorado North", 6430: "NAD83(2011) / Colorado North (ftUS)", 6431: "NAD83(2011) / Colorado South", 6432: "NAD83(2011) / Colorado South (ftUS)", 6433: "NAD83(2011) / Connecticut", 6434: "NAD83(2011) / Connecticut (ftUS)", 6435: "NAD83(2011) / Delaware", 6436: "NAD83(2011) / Delaware (ftUS)", 6437: "NAD83(2011) / Florida East", 6438: "NAD83(2011) / Florida East (ftUS)", 6439: "NAD83(2011) / Florida GDL Albers", 6440: "NAD83(2011) / Florida North", 6441: "NAD83(2011) / Florida North (ftUS)", 6442: "NAD83(2011) / Florida West", 6443: "NAD83(2011) / Florida West (ftUS)", 6444: "NAD83(2011) / Georgia East", 6445: "NAD83(2011) / Georgia East (ftUS)", 6446: "NAD83(2011) / Georgia West", 6447: "NAD83(2011) / Georgia West (ftUS)", 6448: "NAD83(2011) / Idaho Central", 6449: "NAD83(2011) / Idaho Central (ftUS)", 6450: "NAD83(2011) / Idaho East", 6451: "NAD83(2011) / Idaho East (ftUS)", 6452: "NAD83(2011) / Idaho West", 6453: "NAD83(2011) / Idaho West (ftUS)", 6454: "NAD83(2011) / Illinois East", 6455: "NAD83(2011) / Illinois East (ftUS)", 6456: "NAD83(2011) / Illinois West", 6457: "NAD83(2011) / Illinois West (ftUS)", 6458: "NAD83(2011) / Indiana East", 6459: "NAD83(2011) / Indiana East (ftUS)", 6460: "NAD83(2011) / Indiana West", 6461: "NAD83(2011) / Indiana West (ftUS)", 6462: "NAD83(2011) / Iowa North", 6463: "NAD83(2011) / Iowa North (ftUS)", 6464: "NAD83(2011) / Iowa South", 6465: "NAD83(2011) / Iowa South (ftUS)", 6466: "NAD83(2011) / Kansas North", 6467: "NAD83(2011) / Kansas North (ftUS)", 6468: "NAD83(2011) / Kansas South", 6469: "NAD83(2011) / Kansas South (ftUS)", 6470: "NAD83(2011) / Kentucky North", 6471: "NAD83(2011) / Kentucky North (ftUS)", 6472: "NAD83(2011) / Kentucky Single Zone", 6473: "NAD83(2011) / Kentucky Single Zone (ftUS)", 6474: "NAD83(2011) / Kentucky South", 6475: "NAD83(2011) / Kentucky South (ftUS)", 6476: "NAD83(2011) / Louisiana North", 6477: "NAD83(2011) / Louisiana North (ftUS)", 6478: "NAD83(2011) / Louisiana South", 6479: "NAD83(2011) / Louisiana South (ftUS)", 6480: "NAD83(2011) / Maine CS2000 Central", 6481: "NAD83(2011) / Maine CS2000 East", 6482: "NAD83(2011) / Maine CS2000 West", 6483: "NAD83(2011) / Maine East", 6484: 
"NAD83(2011) / Maine East (ftUS)", 6485: "NAD83(2011) / Maine West", 6486: "NAD83(2011) / Maine West (ftUS)", 6487: "NAD83(2011) / Maryland", 6488: "NAD83(2011) / Maryland (ftUS)", 6489: "NAD83(2011) / Massachusetts Island", 6490: "NAD83(2011) / Massachusetts Island (ftUS)", 6491: "NAD83(2011) / Massachusetts Mainland", 6492: "NAD83(2011) / Massachusetts Mainland (ftUS)", 6493: "NAD83(2011) / Michigan Central", 6494: "NAD83(2011) / Michigan Central (ft)", 6495: "NAD83(2011) / Michigan North", 6496: "NAD83(2011) / Michigan North (ft)", 6497: "NAD83(2011) / Michigan Oblique Mercator", 6498: "NAD83(2011) / Michigan South", 6499: "NAD83(2011) / Michigan South (ft)", 6500: "NAD83(2011) / Minnesota Central", 6501: "NAD83(2011) / Minnesota Central (ftUS)", 6502: "NAD83(2011) / Minnesota North", 6503: "NAD83(2011) / Minnesota North (ftUS)", 6504: "NAD83(2011) / Minnesota South", 6505: "NAD83(2011) / Minnesota South (ftUS)", 6506: "NAD83(2011) / Mississippi East", 6507: "NAD83(2011) / Mississippi East (ftUS)", 6508: "NAD83(2011) / Mississippi TM", 6509: "NAD83(2011) / Mississippi West", 6510: "NAD83(2011) / Mississippi West (ftUS)", 6511: "NAD83(2011) / Missouri Central", 6512: "NAD83(2011) / Missouri East", 6513: "NAD83(2011) / Missouri West", 6514: "NAD83(2011) / Montana", 6515: "NAD83(2011) / Montana (ft)", 6516: "NAD83(2011) / Nebraska", 6517: "NAD83(2011) / Nebraska (ftUS)", 6518: "NAD83(2011) / Nevada Central", 6519: "NAD83(2011) / Nevada Central (ftUS)", 6520: "NAD83(2011) / Nevada East", 6521: "NAD83(2011) / Nevada East (ftUS)", 6522: "NAD83(2011) / Nevada West", 6523: "NAD83(2011) / Nevada West (ftUS)", 6524: "NAD83(2011) / New Hampshire", 6525: "NAD83(2011) / New Hampshire (ftUS)", 6526: "NAD83(2011) / New Jersey", 6527: "NAD83(2011) / New Jersey (ftUS)", 6528: "NAD83(2011) / New Mexico Central", 6529: "NAD83(2011) / New Mexico Central (ftUS)", 6530: "NAD83(2011) / New Mexico East", 6531: "NAD83(2011) / New Mexico East (ftUS)", 6532: "NAD83(2011) / New Mexico West", 6533: "NAD83(2011) / New Mexico West (ftUS)", 6534: "NAD83(2011) / New York Central", 6535: "NAD83(2011) / New York Central (ftUS)", 6536: "NAD83(2011) / New York East", 6537: "NAD83(2011) / New York East (ftUS)", 6538: "NAD83(2011) / New York Long Island", 6539: "NAD83(2011) / New York Long Island (ftUS)", 6540: "NAD83(2011) / New York West", 6541: "NAD83(2011) / New York West (ftUS)", 6542: "NAD83(2011) / North Carolina", 6543: "NAD83(2011) / North Carolina (ftUS)", 6544: "NAD83(2011) / North Dakota North", 6545: "NAD83(2011) / North Dakota North (ft)", 6546: "NAD83(2011) / North Dakota South", 6547: "NAD83(2011) / North Dakota South (ft)", 6548: "NAD83(2011) / Ohio North", 6549: "NAD83(2011) / Ohio North (ftUS)", 6550: "NAD83(2011) / Ohio South", 6551: "NAD83(2011) / Ohio South (ftUS)", 6552: "NAD83(2011) / Oklahoma North", 6553: "NAD83(2011) / Oklahoma North (ftUS)", 6554: "NAD83(2011) / Oklahoma South", 6555: "NAD83(2011) / Oklahoma South (ftUS)", 6556: "NAD83(2011) / Oregon LCC (m)", 6557: "NAD83(2011) / Oregon GIC Lambert (ft)", 6558: "NAD83(2011) / Oregon North", 6559: "NAD83(2011) / Oregon North (ft)", 6560: "NAD83(2011) / Oregon South", 6561: "NAD83(2011) / Oregon South (ft)", 6562: "NAD83(2011) / Pennsylvania North", 6563: "NAD83(2011) / Pennsylvania North (ftUS)", 6564: "NAD83(2011) / Pennsylvania South", 6565: "NAD83(2011) / Pennsylvania South (ftUS)", 6566: "NAD83(2011) / Puerto Rico and Virgin Is.", 6567: "NAD83(2011) / Rhode Island", 6568: "NAD83(2011) / Rhode Island (ftUS)", 6569: "NAD83(2011) / South 
Carolina", 6570: "NAD83(2011) / South Carolina (ft)", 6571: "NAD83(2011) / South Dakota North", 6572: "NAD83(2011) / South Dakota North (ftUS)", 6573: "NAD83(2011) / South Dakota South", 6574: "NAD83(2011) / South Dakota South (ftUS)", 6575: "NAD83(2011) / Tennessee", 6576: "NAD83(2011) / Tennessee (ftUS)", 6577: "NAD83(2011) / Texas Central", 6578: "NAD83(2011) / Texas Central (ftUS)", 6579: "NAD83(2011) / Texas Centric Albers Equal Area", 6580: "NAD83(2011) / Texas Centric Lambert Conformal", 6581: "NAD83(2011) / Texas North", 6582: "NAD83(2011) / Texas North (ftUS)", 6583: "NAD83(2011) / Texas North Central", 6584: "NAD83(2011) / Texas North Central (ftUS)", 6585: "NAD83(2011) / Texas South", 6586: "NAD83(2011) / Texas South (ftUS)", 6587: "NAD83(2011) / Texas South Central", 6588: "NAD83(2011) / Texas South Central (ftUS)", 6589: "NAD83(2011) / Vermont", 6590: "NAD83(2011) / Vermont (ftUS)", 6591: "NAD83(2011) / Virginia Lambert", 6592: "NAD83(2011) / Virginia North", 6593: "NAD83(2011) / Virginia North (ftUS)", 6594: "NAD83(2011) / Virginia South", 6595: "NAD83(2011) / Virginia South (ftUS)", 6596: "NAD83(2011) / Washington North", 6597: "NAD83(2011) / Washington North (ftUS)", 6598: "NAD83(2011) / Washington South", 6599: "NAD83(2011) / Washington South (ftUS)", 6600: "NAD83(2011) / West Virginia North", 6601: "NAD83(2011) / West Virginia North (ftUS)", 6602: "NAD83(2011) / West Virginia South", 6603: "NAD83(2011) / West Virginia South (ftUS)", 6604: "NAD83(2011) / Wisconsin Central", 6605: "NAD83(2011) / Wisconsin Central (ftUS)", 6606: "NAD83(2011) / Wisconsin North", 6607: "NAD83(2011) / Wisconsin North (ftUS)", 6608: "NAD83(2011) / Wisconsin South", 6609: "NAD83(2011) / Wisconsin South (ftUS)", 6610: "NAD83(2011) / Wisconsin Transverse Mercator", 6611: "NAD83(2011) / Wyoming East", 6612: "NAD83(2011) / Wyoming East (ftUS)", 6613: "NAD83(2011) / Wyoming East Central", 6614: "NAD83(2011) / Wyoming East Central (ftUS)", 6615: "NAD83(2011) / Wyoming West", 6616: "NAD83(2011) / Wyoming West (ftUS)", 6617: "NAD83(2011) / Wyoming West Central", 6618: "NAD83(2011) / Wyoming West Central (ftUS)", 6619: "NAD83(2011) / Utah Central", 6620: "NAD83(2011) / Utah North", 6621: "NAD83(2011) / Utah South", 6622: "NAD83(CSRS) / Quebec Lambert", 6623: "NAD83 / Quebec Albers", 6624: "NAD83(CSRS) / Quebec Albers", 6625: "NAD83(2011) / Utah Central (ftUS)", 6626: "NAD83(2011) / Utah North (ftUS)", 6627: "NAD83(2011) / Utah South (ftUS)", 6628: "NAD83(PA11) / Hawaii zone 1", 6629: "NAD83(PA11) / Hawaii zone 2", 6630: "NAD83(PA11) / Hawaii zone 3", 6631: "NAD83(PA11) / Hawaii zone 4", 6632: "NAD83(PA11) / Hawaii zone 5", 6633: "NAD83(PA11) / Hawaii zone 3 (ftUS)", 6634: "NAD83(PA11) / UTM zone 4N", 6635: "NAD83(PA11) / UTM zone 5N", 6636: "NAD83(PA11) / UTM zone 2S", 6637: "NAD83(MA11) / Guam Map Grid", 6646: "Karbala 1979 / Iraq National Grid", 6669: "JGD2011 / Japan Plane Rectangular CS I", 6670: "JGD2011 / Japan Plane Rectangular CS II", 6671: "JGD2011 / Japan Plane Rectangular CS III", 6672: "JGD2011 / Japan Plane Rectangular CS IV", 6673: "JGD2011 / Japan Plane Rectangular CS V", 6674: "JGD2011 / Japan Plane Rectangular CS VI", 6675: "JGD2011 / Japan Plane Rectangular CS VII", 6676: "JGD2011 / Japan Plane Rectangular CS VIII", 6677: "JGD2011 / Japan Plane Rectangular CS IX", 6678: "JGD2011 / Japan Plane Rectangular CS X", 6679: "JGD2011 / Japan Plane Rectangular CS XI", 6680: "JGD2011 / Japan Plane Rectangular CS XII", 6681: "JGD2011 / Japan Plane Rectangular CS XIII", 6682: "JGD2011 / Japan Plane 
Rectangular CS XIV", 6683: "JGD2011 / Japan Plane Rectangular CS XV", 6684: "JGD2011 / Japan Plane Rectangular CS XVI", 6685: "JGD2011 / Japan Plane Rectangular CS XVII", 6686: "JGD2011 / Japan Plane Rectangular CS XVIII", 6687: "JGD2011 / Japan Plane Rectangular CS XIX", 6688: "JGD2011 / UTM zone 51N", 6689: "JGD2011 / UTM zone 52N", 6690: "JGD2011 / UTM zone 53N", 6691: "JGD2011 / UTM zone 54N", 6692: "JGD2011 / UTM zone 55N", 6703: "WGS 84 / TM 60 SW", 6707: "RDN2008 / TM32", 6708: "RDN2008 / TM33", 6709: "RDN2008 / TM34", 6720: "WGS 84 / CIG92", 6721: "GDA94 / CIG94", 6722: "WGS 84 / CKIG92", 6723: "GDA94 / CKIG94", 6732: "GDA94 / MGA zone 41", 6733: "GDA94 / MGA zone 42", 6734: "GDA94 / MGA zone 43", 6735: "GDA94 / MGA zone 44", 6736: "GDA94 / MGA zone 46", 6737: "GDA94 / MGA zone 47", 6738: "GDA94 / MGA zone 59", 6784: "NAD83(CORS96) / Oregon Baker zone (m)", 6785: "NAD83(CORS96) / Oregon Baker zone (ft)", 6786: "NAD83(2011) / Oregon Baker zone (m)", 6787: "NAD83(2011) / Oregon Baker zone (ft)", 6788: "NAD83(CORS96) / Oregon Bend-Klamath Falls zone (m)", 6789: "NAD83(CORS96) / Oregon Bend-Klamath Falls zone (ft)", 6790: "NAD83(2011) / Oregon Bend-Klamath Falls zone (m)", 6791: "NAD83(2011) / Oregon Bend-Klamath Falls zone (ft)", 6792: "NAD83(CORS96) / Oregon Bend-Redmond-Prineville zone (m)", 6793: "NAD83(CORS96) / Oregon Bend-Redmond-Prineville zone (ft)", 6794: "NAD83(2011) / Oregon Bend-Redmond-Prineville zone (m)", 6795: "NAD83(2011) / Oregon Bend-Redmond-Prineville zone (ft)", 6796: "NAD83(CORS96) / Oregon Bend-Burns zone (m)", 6797: "NAD83(CORS96) / Oregon Bend-Burns zone (ft)", 6798: "NAD83(2011) / Oregon Bend-Burns zone (m)", 6799: "NAD83(2011) / Oregon Bend-Burns zone (ft)", 6800: "NAD83(CORS96) / Oregon Canyonville-Grants Pass zone (m)", 6801: "NAD83(CORS96) / Oregon Canyonville-Grants Pass zone (ft)", 6802: "NAD83(2011) / Oregon Canyonville-Grants Pass zone (m)", 6803: "NAD83(2011) / Oregon Canyonville-Grants Pass zone (ft)", 6804: "NAD83(CORS96) / Oregon Columbia River East zone (m)", 6805: "NAD83(CORS96) / Oregon Columbia River East zone (ft)", 6806: "NAD83(2011) / Oregon Columbia River East zone (m)", 6807: "NAD83(2011) / Oregon Columbia River East zone (ft)", 6808: "NAD83(CORS96) / Oregon Columbia River West zone (m)", 6809: "NAD83(CORS96) / Oregon Columbia River West zone (ft)", 6810: "NAD83(2011) / Oregon Columbia River West zone (m)", 6811: "NAD83(2011) / Oregon Columbia River West zone (ft)", 6812: "NAD83(CORS96) / Oregon Cottage Grove-Canyonville zone (m)", 6813: "NAD83(CORS96) / Oregon Cottage Grove-Canyonville zone (ft)", 6814: "NAD83(2011) / Oregon Cottage Grove-Canyonville zone (m)", 6815: "NAD83(2011) / Oregon Cottage Grove-Canyonville zone (ft)", 6816: "NAD83(CORS96) / Oregon Dufur-Madras zone (m)", 6817: "NAD83(CORS96) / Oregon Dufur-Madras zone (ft)", 6818: "NAD83(2011) / Oregon Dufur-Madras zone (m)", 6819: "NAD83(2011) / Oregon Dufur-Madras zone (ft)", 6820: "NAD83(CORS96) / Oregon Eugene zone (m)", 6821: "NAD83(CORS96) / Oregon Eugene zone (ft)", 6822: "NAD83(2011) / Oregon Eugene zone (m)", 6823: "NAD83(2011) / Oregon Eugene zone (ft)", 6824: "NAD83(CORS96) / Oregon Grants Pass-Ashland zone (m)", 6825: "NAD83(CORS96) / Oregon Grants Pass-Ashland zone (ft)", 6826: "NAD83(2011) / Oregon Grants Pass-Ashland zone (m)", 6827: "NAD83(2011) / Oregon Grants Pass-Ashland zone (ft)", 6828: "NAD83(CORS96) / Oregon Gresham-Warm Springs zone (m)", 6829: "NAD83(CORS96) / Oregon Gresham-Warm Springs zone (ft)", 6830: "NAD83(2011) / Oregon Gresham-Warm Springs zone 
(m)", 6831: "NAD83(2011) / Oregon Gresham-Warm Springs zone (ft)", 6832: "NAD83(CORS96) / Oregon La Grande zone (m)", 6833: "NAD83(CORS96) / Oregon La Grande zone (ft)", 6834: "NAD83(2011) / Oregon La Grande zone (m)", 6835: "NAD83(2011) / Oregon La Grande zone (ft)", 6836: "NAD83(CORS96) / Oregon Ontario zone (m)", 6837: "NAD83(CORS96) / Oregon Ontario zone (ft)", 6838: "NAD83(2011) / Oregon Ontario zone (m)", 6839: "NAD83(2011) / Oregon Ontario zone (ft)", 6840: "NAD83(CORS96) / Oregon Coast zone (m)", 6841: "NAD83(CORS96) / Oregon Coast zone (ft)", 6842: "NAD83(2011) / Oregon Coast zone (m)", 6843: "NAD83(2011) / Oregon Coast zone (ft)", 6844: "NAD83(CORS96) / Oregon Pendleton zone (m)", 6845: "NAD83(CORS96) / Oregon Pendleton zone (ft)", 6846: "NAD83(2011) / Oregon Pendleton zone (m)", 6847: "NAD83(2011) / Oregon Pendleton zone (ft)", 6848: "NAD83(CORS96) / Oregon Pendleton-La Grande zone (m)", 6849: "NAD83(CORS96) / Oregon Pendleton-La Grande zone (ft)", 6850: "NAD83(2011) / Oregon Pendleton-La Grande zone (m)", 6851: "NAD83(2011) / Oregon Pendleton-La Grande zone (ft)", 6852: "NAD83(CORS96) / Oregon Portland zone (m)", 6853: "NAD83(CORS96) / Oregon Portland zone (ft)", 6854: "NAD83(2011) / Oregon Portland zone (m)", 6855: "NAD83(2011) / Oregon Portland zone (ft)", 6856: "NAD83(CORS96) / Oregon Salem zone (m)", 6857: "NAD83(CORS96) / Oregon Salem zone (ft)", 6858: "NAD83(2011) / Oregon Salem zone (m)", 6859: "NAD83(2011) / Oregon Salem zone (ft)", 6860: "NAD83(CORS96) / Oregon Santiam Pass zone (m)", 6861: "NAD83(CORS96) / Oregon Santiam Pass zone (ft)", 6862: "NAD83(2011) / Oregon Santiam Pass zone (m)", 6863: "NAD83(2011) / Oregon Santiam Pass (ft)", 6867: "NAD83(CORS96) / Oregon LCC (m)", 6868: "NAD83(CORS96) / Oregon GIC Lambert (ft)", 6870: "ETRS89 / Albania TM 2010", 6875: "RDN2008 / Italy zone", 6876: "RDN2008 / Zone 12", 6879: "NAD83(2011) / Wisconsin Central", 6880: "NAD83(2011) / Nebraska (ftUS)", 6884: "NAD83(CORS96) / Oregon North", 6885: "NAD83(CORS96) / Oregon North (ft)", 6886: "NAD83(CORS96) / Oregon South", 6887: "NAD83(CORS96) / Oregon South (ft)", 6915: "South East Island 1943 / UTM zone 40N", 6922: "NAD83 / Kansas LCC", 6923: "NAD83 / Kansas LCC (ftUS)", 6924: "NAD83(2011) / Kansas LCC", 6925: "NAD83(2011) / Kansas LCC (ftUS)", 6931: "WGS 84 / NSIDC EASE-Grid 2.0 North", 6932: "WGS 84 / NSIDC EASE-Grid 2.0 South", 6933: "WGS 84 / NSIDC EASE-Grid 2.0 Global", 6956: "VN-2000 / TM-3 zone 481", 6957: "VN-2000 / TM-3 zone 482", 6958: "VN-2000 / TM-3 zone 491", 6959: "VN-2000 / TM-3 Da Nang zone", 6962: "ETRS89 / Albania LCC 2010", 6966: "NAD27 / Michigan North", 6984: "Israeli Grid 05", 6991: "Israeli Grid 05/12", 6996: "NAD83(2011) / San Francisco CS13", 6997: "NAD83(2011) / San Francisco CS13 (ftUS)", 7005: "Nahrwan 1934 / UTM zone 37N", 7006: "Nahrwan 1934 / UTM zone 38N", 7007: "Nahrwan 1934 / UTM zone 39N", 7057: "NAD83(2011) / IaRCS zone 1", 7058: "NAD83(2011) / IaRCS zone 2", 7059: "NAD83(2011) / IaRCS zone 3", 7060: "NAD83(2011) / IaRCS zone 4", 7061: "NAD83(2011) / IaRCS zone 5", 7062: "NAD83(2011) / IaRCS zone 6", 7063: "NAD83(2011) / IaRCS zone 7", 7064: "NAD83(2011) / IaRCS zone 8", 7065: "NAD83(2011) / IaRCS zone 9", 7066: "NAD83(2011) / IaRCS zone 10", 7067: "NAD83(2011) / IaRCS zone 11", 7068: "NAD83(2011) / IaRCS zone 12", 7069: "NAD83(2011) / IaRCS zone 13", 7070: "NAD83(2011) / IaRCS zone 14", 7074: "RGTAAF07 / UTM zone 37S", 7075: "RGTAAF07 / UTM zone 38S", 7076: "RGTAAF07 / UTM zone 39S", 7077: "RGTAAF07 / UTM zone 40S", 7078: "RGTAAF07 / UTM zone 
41S", 7079: "RGTAAF07 / UTM zone 42S", 7080: "RGTAAF07 / UTM zone 43S", 7081: "RGTAAF07 / UTM zone 44S", 7082: "RGTAAF07 / Terre Adelie Polar Stereographic", 7109: "NAD83(2011) / RMTCRS St Mary (m)", 7110: "NAD83(2011) / RMTCRS Blackfeet (m)", 7111: "NAD83(2011) / RMTCRS Milk River (m)", 7112: "NAD83(2011) / RMTCRS Fort Belknap (m)", 7113: "NAD83(2011) / RMTCRS Fort Peck Assiniboine (m)", 7114: "NAD83(2011) / RMTCRS Fort Peck Sioux (m)", 7115: "NAD83(2011) / RMTCRS Crow (m)", 7116: "NAD83(2011) / RMTCRS Bobcat (m)", 7117: "NAD83(2011) / RMTCRS Billings (m)", 7118: "NAD83(2011) / RMTCRS Wind River (m)", 7119: "NAD83(2011) / RMTCRS St Mary (ft)", 7120: "NAD83(2011) / RMTCRS Blackfeet (ft)", 7121: "NAD83(2011) / RMTCRS Milk River (ft)", 7122: "NAD83(2011) / RMTCRS Fort Belknap (ft)", 7123: "NAD83(2011) / RMTCRS Fort Peck Assiniboine (ft)", 7124: "NAD83(2011) / RMTCRS Fort Peck Sioux (ft)", 7125: "NAD83(2011) / RMTCRS Crow (ft)", 7126: "NAD83(2011) / RMTCRS Bobcat (ft)", 7127: "NAD83(2011) / RMTCRS Billings (ft)", 7128: "NAD83(2011) / RMTCRS Wind River (ftUS)", 7131: "NAD83(2011) / San Francisco CS13", 7132: "NAD83(2011) / San Francisco CS13 (ftUS)", 7142: "Palestine 1923 / Palestine Grid modified", 7257: "NAD83(2011) / InGCS Adams (m)", 7258: "NAD83(2011) / InGCS Adams (ftUS)", 7259: "NAD83(2011) / InGCS Allen (m)", 7260: "NAD83(2011) / InGCS Allen (ftUS)", 7261: "NAD83(2011) / InGCS Bartholomew (m)", 7262: "NAD83(2011) / InGCS Bartholomew (ftUS)", 7263: "NAD83(2011) / InGCS Benton (m)", 7264: "NAD83(2011) / InGCS Benton (ftUS)", 7265: "NAD83(2011) / InGCS Blackford-Delaware (m)", 7266: "NAD83(2011) / InGCS Blackford-Delaware (ftUS)", 7267: "NAD83(2011) / InGCS Boone-Hendricks (m)", 7268: "NAD83(2011) / InGCS Boone-Hendricks (ftUS)", 7269: "NAD83(2011) / InGCS Brown (m)", 7270: "NAD83(2011) / InGCS Brown (ftUS)", 7271: "NAD83(2011) / InGCS Carroll (m)", 7272: "NAD83(2011) / InGCS Carroll (ftUS)", 7273: "NAD83(2011) / InGCS Cass (m)", 7274: "NAD83(2011) / InGCS Cass (ftUS)", 7275: "NAD83(2011) / InGCS Clark-Floyd-Scott (m)", 7276: "NAD83(2011) / InGCS Clark-Floyd-Scott (ftUS)", 7277: "NAD83(2011) / InGCS Clay (m)", 7278: "NAD83(2011) / InGCS Clay (ftUS)", 7279: "NAD83(2011) / InGCS Clinton (m)", 7280: "NAD83(2011) / InGCS Clinton (ftUS)", 7281: "NAD83(2011) / InGCS Crawford-Lawrence-Orange (m)", 7282: "NAD83(2011) / InGCS Crawford-Lawrence-Orange (ftUS)", 7283: "NAD83(2011) / InGCS Daviess-Greene (m)", 7284: "NAD83(2011) / InGCS Daviess-Greene (ftUS)", 7285: "NAD83(2011) / InGCS Dearborn-Ohio-Switzerland (m)", 7286: "NAD83(2011) / InGCS Dearborn-Ohio-Switzerland (ftUS)", 7287: "NAD83(2011) / InGCS Decatur-Rush (m)", 7288: "NAD83(2011) / InGCS Decatur-Rush (ftUS)", 7289: "NAD83(2011) / InGCS DeKalb (m)", 7290: "NAD83(2011) / InGCS DeKalb (ftUS)", 7291: "NAD83(2011) / InGCS Dubois-Martin (m)", 7292: "NAD83(2011) / InGCS Dubois-Martin (ftUS)", 7293: "NAD83(2011) / InGCS Elkhart-Kosciusko-Wabash (m)", 7294: "NAD83(2011) / InGCS Elkhart-Kosciusko-Wabash (ftUS)", 7295: "NAD83(2011) / InGCS Fayette-Franklin-Union (m)", 7296: "NAD83(2011) / InGCS Fayette-Franklin-Union (ftUS)", 7297: "NAD83(2011) / InGCS Fountain-Warren (m)", 7298: "NAD83(2011) / InGCS Fountain-Warren (ftUS)", 7299: "NAD83(2011) / InGCS Fulton-Marshall-St. Joseph (m)", 7300: "NAD83(2011) / InGCS Fulton-Marshall-St. 
Joseph (ftUS)", 7301: "NAD83(2011) / InGCS Gibson (m)", 7302: "NAD83(2011) / InGCS Gibson (ftUS)", 7303: "NAD83(2011) / InGCS Grant (m)", 7304: "NAD83(2011) / InGCS Grant (ftUS)", 7305: "NAD83(2011) / InGCS Hamilton-Tipton (m)", 7306: "NAD83(2011) / InGCS Hamilton-Tipton (ftUS)", 7307: "NAD83(2011) / InGCS Hancock-Madison (m)", 7308: "NAD83(2011) / InGCS Hancock-Madison (ftUS)", 7309: "NAD83(2011) / InGCS Harrison-Washington (m)", 7310: "NAD83(2011) / InGCS Harrison-Washington (ftUS)", 7311: "NAD83(2011) / InGCS Henry (m)", 7312: "NAD83(2011) / InGCS Henry (ftUS)", 7313: "NAD83(2011) / InGCS Howard-Miami (m)", 7314: "NAD83(2011) / InGCS Howard-Miami (ftUS)", 7315: "NAD83(2011) / InGCS Huntington-Whitley (m)", 7316: "NAD83(2011) / InGCS Huntington-Whitley (ftUS)", 7317: "NAD83(2011) / InGCS Jackson (m)", 7318: "NAD83(2011) / InGCS Jackson (ftUS)", 7319: "NAD83(2011) / InGCS Jasper-Porter (m)", 7320: "NAD83(2011) / InGCS Jasper-Porter (ftUS)", 7321: "NAD83(2011) / InGCS Jay (m)", 7322: "NAD83(2011) / InGCS Jay (ftUS)", 7323: "NAD83(2011) / InGCS Jefferson (m)", 7324: "NAD83(2011) / InGCS Jefferson (ftUS)", 7325: "NAD83(2011) / InGCS Jennings (m)", 7326: "NAD83(2011) / InGCS Jennings (ftUS)", 7327: "NAD83(2011) / InGCS Johnson-Marion (m)", 7328: "NAD83(2011) / InGCS Johnson-Marion (ftUS)", 7329: "NAD83(2011) / InGCS Knox (m)", 7330: "NAD83(2011) / InGCS Knox (ftUS)", 7331: "NAD83(2011) / InGCS LaGrange-Noble (m)", 7332: "NAD83(2011) / InGCS LaGrange-Noble (ftUS)", 7333: "NAD83(2011) / InGCS Lake-Newton (m)", 7334: "NAD83(2011) / InGCS Lake-Newton (ftUS)", 7335: "NAD83(2011) / InGCS LaPorte-Pulaski-Starke (m)", 7336: "NAD83(2011) / InGCS LaPorte-Pulaski-Starke (ftUS)", 7337: "NAD83(2011) / InGCS Monroe-Morgan (m)", 7338: "NAD83(2011) / InGCS Monroe-Morgan (ftUS)", 7339: "NAD83(2011) / InGCS Montgomery-Putnam (m)", 7340: "NAD83(2011) / InGCS Montgomery-Putnam (ftUS)", 7341: "NAD83(2011) / InGCS Owen (m)", 7342: "NAD83(2011) / InGCS Owen (ftUS)", 7343: "NAD83(2011) / InGCS Parke-Vermillion (m)", 7344: "NAD83(2011) / InGCS Parke-Vermillion (ftUS)", 7345: "NAD83(2011) / InGCS Perry (m)", 7346: "NAD83(2011) / InGCS Perry (ftUS)", 7347: "NAD83(2011) / InGCS Pike-Warrick (m)", 7348: "NAD83(2011) / InGCS Pike-Warrick (ftUS)", 7349: "NAD83(2011) / InGCS Posey (m)", 7350: "NAD83(2011) / InGCS Posey (ftUS)", 7351: "NAD83(2011) / InGCS Randolph-Wayne (m)", 7352: "NAD83(2011) / InGCS Randolph-Wayne (ftUS)", 7353: "NAD83(2011) / InGCS Ripley (m)", 7354: "NAD83(2011) / InGCS Ripley (ftUS)", 7355: "NAD83(2011) / InGCS Shelby (m)", 7356: "NAD83(2011) / InGCS Shelby (ftUS)", 7357: "NAD83(2011) / InGCS Spencer (m)", 7358: "NAD83(2011) / InGCS Spencer (ftUS)", 7359: "NAD83(2011) / InGCS Steuben (m)", 7360: "NAD83(2011) / InGCS Steuben (ftUS)", 7361: "NAD83(2011) / InGCS Sullivan (m)", 7362: "NAD83(2011) / InGCS Sullivan (ftUS)", 7363: "NAD83(2011) / InGCS Tippecanoe-White (m)", 7364: "NAD83(2011) / InGCS Tippecanoe-White (ftUS)", 7365: "NAD83(2011) / InGCS Vanderburgh (m)", 7366: "NAD83(2011) / InGCS Vanderburgh (ftUS)", 7367: "NAD83(2011) / InGCS Vigo (m)", 7368: "NAD83(2011) / InGCS Vigo (ftUS)", 7369: "NAD83(2011) / InGCS Wells (m)", 7370: "NAD83(2011) / InGCS Wells (ftUS)", 7374: "ONGD14 / UTM zone 39N", 7375: "ONGD14 / UTM zone 40N", 7376: "ONGD14 / UTM zone 41N", 7528: "NAD83(2011) / WISCRS Adams and Juneau (m)", 7529: "NAD83(2011) / WISCRS Ashland (m)", 7530: "NAD83(2011) / WISCRS Barron (m)", 7531: "NAD83(2011) / WISCRS Bayfield (m)", 7532: "NAD83(2011) / WISCRS Brown (m)", 7533: "NAD83(2011) / WISCRS 
Buffalo (m)", 7534: "NAD83(2011) / WISCRS Burnett (m)", 7535: "NAD83(2011) / WISCRS Calumet, Fond du Lac, Outagamie and Winnebago (m)", 7536: "NAD83(2011) / WISCRS Chippewa (m)", 7537: "NAD83(2011) / WISCRS Clark (m)", 7538: "NAD83(2011) / WISCRS Columbia (m)", 7539: "NAD83(2011) / WISCRS Crawford (m)", 7540: "NAD83(2011) / WISCRS Dane (m)", 7541: "NAD83(2011) / WISCRS Dodge and Jefferson (m)", 7542: "NAD83(2011) / WISCRS Door (m)", 7543: "NAD83(2011) / WISCRS Douglas (m)", 7544: "NAD83(2011) / WISCRS Dunn (m)", 7545: "NAD83(2011) / WISCRS Eau Claire (m)", 7546: "NAD83(2011) / WISCRS Florence (m)", 7547: "NAD83(2011) / WISCRS Forest (m)", 7548: "NAD83(2011) / WISCRS Grant (m)", 7549: "NAD83(2011) / WISCRS Green and Lafayette (m)", 7550: "NAD83(2011) / WISCRS Green Lake and Marquette (m)", 7551: "NAD83(2011) / WISCRS Iowa (m)", 7552: "NAD83(2011) / WISCRS Iron (m)", 7553: "NAD83(2011) / WISCRS Jackson (m)", 7554: "NAD83(2011) / WISCRS Kenosha, Milwaukee, Ozaukee and Racine (m)", 7555: "NAD83(2011) / WISCRS Kewaunee, Manitowoc and Sheboygan (m)", 7556: "NAD83(2011) / WISCRS La Crosse (m)", 7557: "NAD83(2011) / WISCRS Langlade (m)", 7558: "NAD83(2011) / WISCRS Lincoln (m)", 7559: "NAD83(2011) / WISCRS Marathon (m)", 7560: "NAD83(2011) / WISCRS Marinette (m)", 7561: "NAD83(2011) / WISCRS Menominee (m)", 7562: "NAD83(2011) / WISCRS Monroe (m)", 7563: "NAD83(2011) / WISCRS Oconto (m)", 7564: "NAD83(2011) / WISCRS Oneida (m)", 7565: "NAD83(2011) / WISCRS Pepin and Pierce (m)", 7566: "NAD83(2011) / WISCRS Polk (m)", 7567: "NAD83(2011) / WISCRS Portage (m)", 7568: "NAD83(2011) / WISCRS Price (m)", 7569: "NAD83(2011) / WISCRS Richland (m)", 7570: "NAD83(2011) / WISCRS Rock (m)", 7571: "NAD83(2011) / WISCRS Rusk (m)", 7572: "NAD83(2011) / WISCRS Sauk (m)", 7573: "NAD83(2011) / WISCRS Sawyer (m)", 7574: "NAD83(2011) / WISCRS Shawano (m)", 7575: "NAD83(2011) / WISCRS St. 
Croix (m)", 7576: "NAD83(2011) / WISCRS Taylor (m)", 7577: "NAD83(2011) / WISCRS Trempealeau (m)", 7578: "NAD83(2011) / WISCRS Vernon (m)", 7579: "NAD83(2011) / WISCRS Vilas (m)", 7580: "NAD83(2011) / WISCRS Walworth (m)", 7581: "NAD83(2011) / WISCRS Washburn (m)", 7582: "NAD83(2011) / WISCRS Washington (m)", 7583: "NAD83(2011) / WISCRS Waukesha (m)", 7584: "NAD83(2011) / WISCRS Waupaca (m)", 7585: "NAD83(2011) / WISCRS Waushara (m)", 7586: "NAD83(2011) / WISCRS Wood (m)", 7587: "NAD83(2011) / WISCRS Adams and Juneau (ftUS)", 7588: "NAD83(2011) / WISCRS Ashland (ftUS)", 7589: "NAD83(2011) / WISCRS Barron (ftUS)", 7590: "NAD83(2011) / WISCRS Bayfield (ftUS)", 7591: "NAD83(2011) / WISCRS Brown (ftUS)", 7592: "NAD83(2011) / WISCRS Buffalo (ftUS)", 7593: "NAD83(2011) / WISCRS Burnett (ftUS)", 7594: "NAD83(2011) / WISCRS Calumet, Fond du Lac, Outagamie and Winnebago (ftUS)", 7595: "NAD83(2011) / WISCRS Chippewa (ftUS)", 7596: "NAD83(2011) / WISCRS Clark (ftUS)", 7597: "NAD83(2011) / WISCRS Columbia (ftUS)", 7598: "NAD83(2011) / WISCRS Crawford (ftUS)", 7599: "NAD83(2011) / WISCRS Dane (ftUS)", 7600: "NAD83(2011) / WISCRS Dodge and Jefferson (ftUS)", 7601: "NAD83(2011) / WISCRS Door (ftUS)", 7602: "NAD83(2011) / WISCRS Douglas (ftUS)", 7603: "NAD83(2011) / WISCRS Dunn (ftUS)", 7604: "NAD83(2011) / WISCRS Eau Claire (ftUS)", 7605: "NAD83(2011) / WISCRS Florence (ftUS)", 7606: "NAD83(2011) / WISCRS Forest (ftUS)", 7607: "NAD83(2011) / WISCRS Grant (ftUS)", 7608: "NAD83(2011) / WISCRS Green and Lafayette (ftUS)", 7609: "NAD83(2011) / WISCRS Green Lake and Marquette (ftUS)", 7610: "NAD83(2011) / WISCRS Iowa (ftUS)", 7611: "NAD83(2011) / WISCRS Iron (ftUS)", 7612: "NAD83(2011) / WISCRS Jackson (ftUS)", 7613: "NAD83(2011) / WISCRS Kenosha, Milwaukee, Ozaukee and Racine (ftUS)", 7614: "NAD83(2011) / WISCRS Kewaunee, Manitowoc and Sheboygan (ftUS)", 7615: "NAD83(2011) / WISCRS La Crosse (ftUS)", 7616: "NAD83(2011) / WISCRS Langlade (ftUS)", 7617: "NAD83(2011) / WISCRS Lincoln (ftUS)", 7618: "NAD83(2011) / WISCRS Marathon (ftUS)", 7619: "NAD83(2011) / WISCRS Marinette (ftUS)", 7620: "NAD83(2011) / WISCRS Menominee (ftUS)", 7621: "NAD83(2011) / WISCRS Monroe (ftUS)", 7622: "NAD83(2011) / WISCRS Oconto (ftUS)", 7623: "NAD83(2011) / WISCRS Oneida (ftUS)", 7624: "NAD83(2011) / WISCRS Pepin and Pierce (ftUS)", 7625: "NAD83(2011) / WISCRS Polk (ftUS)", 7626: "NAD83(2011) / WISCRS Portage (ftUS)", 7627: "NAD83(2011) / WISCRS Price (ftUS)", 7628: "NAD83(2011) / WISCRS Richland (ftUS)", 7629: "NAD83(2011) / WISCRS Rock (ftUS)", 7630: "NAD83(2011) / WISCRS Rusk (ftUS)", 7631: "NAD83(2011) / WISCRS Sauk (ftUS)", 7632: "NAD83(2011) / WISCRS Sawyer (ftUS)", 7633: "NAD83(2011) / WISCRS Shawano (ftUS)", 7634: "NAD83(2011) / WISCRS St. 
Croix (ftUS)", 7635: "NAD83(2011) / WISCRS Taylor (ftUS)", 7636: "NAD83(2011) / WISCRS Trempealeau (ftUS)", 7637: "NAD83(2011) / WISCRS Vernon (ftUS)", 7638: "NAD83(2011) / WISCRS Vilas (ftUS)", 7639: "NAD83(2011) / WISCRS Walworth (ftUS)", 7640: "NAD83(2011) / WISCRS Washburn (ftUS)", 7641: "NAD83(2011) / WISCRS Washington (ftUS)", 7642: "NAD83(2011) / WISCRS Waukesha (ftUS)", 7643: "NAD83(2011) / WISCRS Waupaca (ftUS)", 7644: "NAD83(2011) / WISCRS Waushara (ftUS)", 7645: "NAD83(2011) / WISCRS Wood (ftUS)", 7692: "Kyrg-06 / zone 1", 7693: "Kyrg-06 / zone 2", 7694: "Kyrg-06 / zone 3", 7695: "Kyrg-06 / zone 4", 7696: "Kyrg-06 / zone 5", 20004: "Pulkovo 1995 / Gauss-Kruger zone 4", 20005: "Pulkovo 1995 / Gauss-Kruger zone 5", 20006: "Pulkovo 1995 / Gauss-Kruger zone 6", 20007: "Pulkovo 1995 / Gauss-Kruger zone 7", 20008: "Pulkovo 1995 / Gauss-Kruger zone 8", 20009: "Pulkovo 1995 / Gauss-Kruger zone 9", 20010: "Pulkovo 1995 / Gauss-Kruger zone 10", 20011: "Pulkovo 1995 / Gauss-Kruger zone 11", 20012: "Pulkovo 1995 / Gauss-Kruger zone 12", 20013: "Pulkovo 1995 / Gauss-Kruger zone 13", 20014: "Pulkovo 1995 / Gauss-Kruger zone 14", 20015: "Pulkovo 1995 / Gauss-Kruger zone 15", 20016: "Pulkovo 1995 / Gauss-Kruger zone 16", 20017: "Pulkovo 1995 / Gauss-Kruger zone 17", 20018: "Pulkovo 1995 / Gauss-Kruger zone 18", 20019: "Pulkovo 1995 / Gauss-Kruger zone 19", 20020: "Pulkovo 1995 / Gauss-Kruger zone 20", 20021: "Pulkovo 1995 / Gauss-Kruger zone 21", 20022: "Pulkovo 1995 / Gauss-Kruger zone 22", 20023: "Pulkovo 1995 / Gauss-Kruger zone 23", 20024: "Pulkovo 1995 / Gauss-Kruger zone 24", 20025: "Pulkovo 1995 / Gauss-Kruger zone 25", 20026: "Pulkovo 1995 / Gauss-Kruger zone 26", 20027: "Pulkovo 1995 / Gauss-Kruger zone 27", 20028: "Pulkovo 1995 / Gauss-Kruger zone 28", 20029: "Pulkovo 1995 / Gauss-Kruger zone 29", 20030: "Pulkovo 1995 / Gauss-Kruger zone 30", 20031: "Pulkovo 1995 / Gauss-Kruger zone 31", 20032: "Pulkovo 1995 / Gauss-Kruger zone 32", 20064: "Pulkovo 1995 / Gauss-Kruger 4N", 20065: "Pulkovo 1995 / Gauss-Kruger 5N", 20066: "Pulkovo 1995 / Gauss-Kruger 6N", 20067: "Pulkovo 1995 / Gauss-Kruger 7N", 20068: "Pulkovo 1995 / Gauss-Kruger 8N", 20069: "Pulkovo 1995 / Gauss-Kruger 9N", 20070: "Pulkovo 1995 / Gauss-Kruger 10N", 20071: "Pulkovo 1995 / Gauss-Kruger 11N", 20072: "Pulkovo 1995 / Gauss-Kruger 12N", 20073: "Pulkovo 1995 / Gauss-Kruger 13N", 20074: "Pulkovo 1995 / Gauss-Kruger 14N", 20075: "Pulkovo 1995 / Gauss-Kruger 15N", 20076: "Pulkovo 1995 / Gauss-Kruger 16N", 20077: "Pulkovo 1995 / Gauss-Kruger 17N", 20078: "Pulkovo 1995 / Gauss-Kruger 18N", 20079: "Pulkovo 1995 / Gauss-Kruger 19N", 20080: "Pulkovo 1995 / Gauss-Kruger 20N", 20081: "Pulkovo 1995 / Gauss-Kruger 21N", 20082: "Pulkovo 1995 / Gauss-Kruger 22N", 20083: "Pulkovo 1995 / Gauss-Kruger 23N", 20084: "Pulkovo 1995 / Gauss-Kruger 24N", 20085: "Pulkovo 1995 / Gauss-Kruger 25N", 20086: "Pulkovo 1995 / Gauss-Kruger 26N", 20087: "Pulkovo 1995 / Gauss-Kruger 27N", 20088: "Pulkovo 1995 / Gauss-Kruger 28N", 20089: "Pulkovo 1995 / Gauss-Kruger 29N", 20090: "Pulkovo 1995 / Gauss-Kruger 30N", 20091: "Pulkovo 1995 / Gauss-Kruger 31N", 20092: "Pulkovo 1995 / Gauss-Kruger 32N", 20135: "Adindan / UTM zone 35N", 20136: "Adindan / UTM zone 36N", 20137: "Adindan / UTM zone 37N", 20138: "Adindan / UTM zone 38N", 20248: "AGD66 / AMG zone 48", 20249: "AGD66 / AMG zone 49", 20250: "AGD66 / AMG zone 50", 20251: "AGD66 / AMG zone 51", 20252: "AGD66 / AMG zone 52", 20253: "AGD66 / AMG zone 53", 20254: "AGD66 / AMG zone 54", 20255: "AGD66 / AMG zone 
55", 20256: "AGD66 / AMG zone 56", 20257: "AGD66 / AMG zone 57", 20258: "AGD66 / AMG zone 58", 20348: "AGD84 / AMG zone 48", 20349: "AGD84 / AMG zone 49", 20350: "AGD84 / AMG zone 50", 20351: "AGD84 / AMG zone 51", 20352: "AGD84 / AMG zone 52", 20353: "AGD84 / AMG zone 53", 20354: "AGD84 / AMG zone 54", 20355: "AGD84 / AMG zone 55", 20356: "AGD84 / AMG zone 56", 20357: "AGD84 / AMG zone 57", 20358: "AGD84 / AMG zone 58", 20436: "Ain el Abd / UTM zone 36N", 20437: "Ain el Abd / UTM zone 37N", 20438: "Ain el Abd / UTM zone 38N", 20439: "Ain el Abd / UTM zone 39N", 20440: "Ain el Abd / UTM zone 40N", 20499: "Ain el Abd / Bahrain Grid", 20538: "Afgooye / UTM zone 38N", 20539: "Afgooye / UTM zone 39N", 20790: "Lisbon (Lisbon) / Portuguese National Grid", 20791: "Lisbon (Lisbon) / Portuguese Grid", 20822: "Aratu / UTM zone 22S", 20823: "Aratu / UTM zone 23S", 20824: "Aratu / UTM zone 24S", 20934: "Arc 1950 / UTM zone 34S", 20935: "Arc 1950 / UTM zone 35S", 20936: "Arc 1950 / UTM zone 36S", 21035: "Arc 1960 / UTM zone 35S", 21036: "Arc 1960 / UTM zone 36S", 21037: "Arc 1960 / UTM zone 37S", 21095: "Arc 1960 / UTM zone 35N", 21096: "Arc 1960 / UTM zone 36N", 21097: "Arc 1960 / UTM zone 37N", 21100: "Batavia (Jakarta) / NEIEZ", 21148: "Batavia / UTM zone 48S", 21149: "Batavia / UTM zone 49S", 21150: "Batavia / UTM zone 50S", 21291: "Barbados 1938 / British West Indies Grid", 21292: "Barbados 1938 / Barbados National Grid", 21413: "Beijing 1954 / Gauss-Kruger zone 13", 21414: "Beijing 1954 / Gauss-Kruger zone 14", 21415: "Beijing 1954 / Gauss-Kruger zone 15", 21416: "Beijing 1954 / Gauss-Kruger zone 16", 21417: "Beijing 1954 / Gauss-Kruger zone 17", 21418: "Beijing 1954 / Gauss-Kruger zone 18", 21419: "Beijing 1954 / Gauss-Kruger zone 19", 21420: "Beijing 1954 / Gauss-Kruger zone 20", 21421: "Beijing 1954 / Gauss-Kruger zone 21", 21422: "Beijing 1954 / Gauss-Kruger zone 22", 21423: "Beijing 1954 / Gauss-Kruger zone 23", 21453: "Beijing 1954 / Gauss-Kruger CM 75E", 21454: "Beijing 1954 / Gauss-Kruger CM 81E", 21455: "Beijing 1954 / Gauss-Kruger CM 87E", 21456: "Beijing 1954 / Gauss-Kruger CM 93E", 21457: "Beijing 1954 / Gauss-Kruger CM 99E", 21458: "Beijing 1954 / Gauss-Kruger CM 105E", 21459: "Beijing 1954 / Gauss-Kruger CM 111E", 21460: "Beijing 1954 / Gauss-Kruger CM 117E", 21461: "Beijing 1954 / Gauss-Kruger CM 123E", 21462: "Beijing 1954 / Gauss-Kruger CM 129E", 21463: "Beijing 1954 / Gauss-Kruger CM 135E", 21473: "Beijing 1954 / Gauss-Kruger 13N", 21474: "Beijing 1954 / Gauss-Kruger 14N", 21475: "Beijing 1954 / Gauss-Kruger 15N", 21476: "Beijing 1954 / Gauss-Kruger 16N", 21477: "Beijing 1954 / Gauss-Kruger 17N", 21478: "Beijing 1954 / Gauss-Kruger 18N", 21479: "Beijing 1954 / Gauss-Kruger 19N", 21480: "Beijing 1954 / Gauss-Kruger 20N", 21481: "Beijing 1954 / Gauss-Kruger 21N", 21482: "Beijing 1954 / Gauss-Kruger 22N", 21483: "Beijing 1954 / Gauss-Kruger 23N", 21500: "Belge 1950 (Brussels) / Belge Lambert 50", 21780: "Bern 1898 (Bern) / LV03C", 21781: "CH1903 / LV03", 21782: "CH1903 / LV03C-G", 21817: "Bogota 1975 / UTM zone 17N", 21818: "Bogota 1975 / UTM zone 18N", 21891: "Bogota 1975 / Colombia West zone", 21892: "Bogota 1975 / Colombia Bogota zone", 21893: "Bogota 1975 / Colombia East Central zone", 21894: "Bogota 1975 / Colombia East", 21896: "Bogota 1975 / Colombia West zone", 21897: "Bogota 1975 / Colombia Bogota zone", 21898: "Bogota 1975 / Colombia East Central zone", 21899: "Bogota 1975 / Colombia East", 22032: "Camacupa / UTM zone 32S", 22033: "Camacupa / UTM zone 33S", 22091: 
"Camacupa / TM 11.30 SE", 22092: "Camacupa / TM 12 SE", 22171: "POSGAR 98 / Argentina 1", 22172: "POSGAR 98 / Argentina 2", 22173: "POSGAR 98 / Argentina 3", 22174: "POSGAR 98 / Argentina 4", 22175: "POSGAR 98 / Argentina 5", 22176: "POSGAR 98 / Argentina 6", 22177: "POSGAR 98 / Argentina 7", 22181: "POSGAR 94 / Argentina 1", 22182: "POSGAR 94 / Argentina 2", 22183: "POSGAR 94 / Argentina 3", 22184: "POSGAR 94 / Argentina 4", 22185: "POSGAR 94 / Argentina 5", 22186: "POSGAR 94 / Argentina 6", 22187: "POSGAR 94 / Argentina 7", 22191: "Campo Inchauspe / Argentina 1", 22192: "Campo Inchauspe / Argentina 2", 22193: "Campo Inchauspe / Argentina 3", 22194: "Campo Inchauspe / Argentina 4", 22195: "Campo Inchauspe / Argentina 5", 22196: "Campo Inchauspe / Argentina 6", 22197: "Campo Inchauspe / Argentina 7", 22234: "Cape / UTM zone 34S", 22235: "Cape / UTM zone 35S", 22236: "Cape / UTM zone 36S", 22275: "Cape / Lo15", 22277: "Cape / Lo17", 22279: "Cape / Lo19", 22281: "Cape / Lo21", 22283: "Cape / Lo23", 22285: "Cape / Lo25", 22287: "Cape / Lo27", 22289: "Cape / Lo29", 22291: "Cape / Lo31", 22293: "Cape / Lo33", 22300: "Carthage (Paris) / Tunisia Mining Grid", 22332: "Carthage / UTM zone 32N", 22391: "Carthage / Nord Tunisie", 22392: "Carthage / Sud Tunisie", 22521: "Corrego Alegre 1970-72 / UTM zone 21S", 22522: "Corrego Alegre 1970-72 / UTM zone 22S", 22523: "Corrego Alegre 1970-72 / UTM zone 23S", 22524: "Corrego Alegre 1970-72 / UTM zone 24S", 22525: "Corrego Alegre 1970-72 / UTM zone 25S", 22700: "Deir ez Zor / Levant Zone", 22770: "Deir ez Zor / Syria Lambert", 22780: "Deir ez Zor / Levant Stereographic", 22832: "Douala / UTM zone 32N", 22991: "Egypt 1907 / Blue Belt", 22992: "Egypt 1907 / Red Belt", 22993: "Egypt 1907 / Purple Belt", 22994: "Egypt 1907 / Extended Purple Belt", 23028: "ED50 / UTM zone 28N", 23029: "ED50 / UTM zone 29N", 23030: "ED50 / UTM zone 30N", 23031: "ED50 / UTM zone 31N", 23032: "ED50 / UTM zone 32N", 23033: "ED50 / UTM zone 33N", 23034: "ED50 / UTM zone 34N", 23035: "ED50 / UTM zone 35N", 23036: "ED50 / UTM zone 36N", 23037: "ED50 / UTM zone 37N", 23038: "ED50 / UTM zone 38N", 23090: "ED50 / TM 0 N", 23095: "ED50 / TM 5 NE", 23239: "Fahud / UTM zone 39N", 23240: "Fahud / UTM zone 40N", 23433: "Garoua / UTM zone 33N", 23700: "HD72 / EOV", 23830: "DGN95 / Indonesia TM-3 zone 46.2", 23831: "DGN95 / Indonesia TM-3 zone 47.1", 23832: "DGN95 / Indonesia TM-3 zone 47.2", 23833: "DGN95 / Indonesia TM-3 zone 48.1", 23834: "DGN95 / Indonesia TM-3 zone 48.2", 23835: "DGN95 / Indonesia TM-3 zone 49.1", 23836: "DGN95 / Indonesia TM-3 zone 49.2", 23837: "DGN95 / Indonesia TM-3 zone 50.1", 23838: "DGN95 / Indonesia TM-3 zone 50.2", 23839: "DGN95 / Indonesia TM-3 zone 51.1", 23840: "DGN95 / Indonesia TM-3 zone 51.2", 23841: "DGN95 / Indonesia TM-3 zone 52.1", 23842: "DGN95 / Indonesia TM-3 zone 52.2", 23843: "DGN95 / Indonesia TM-3 zone 53.1", 23844: "DGN95 / Indonesia TM-3 zone 53.2", 23845: "DGN95 / Indonesia TM-3 zone 54.1", 23846: "ID74 / UTM zone 46N", 23847: "ID74 / UTM zone 47N", 23848: "ID74 / UTM zone 48N", 23849: "ID74 / UTM zone 49N", 23850: "ID74 / UTM zone 50N", 23851: "ID74 / UTM zone 51N", 23852: "ID74 / UTM zone 52N", 23853: "ID74 / UTM zone 53N", 23866: "DGN95 / UTM zone 46N", 23867: "DGN95 / UTM zone 47N", 23868: "DGN95 / UTM zone 48N", 23869: "DGN95 / UTM zone 49N", 23870: "DGN95 / UTM zone 50N", 23871: "DGN95 / UTM zone 51N", 23872: "DGN95 / UTM zone 52N", 23877: "DGN95 / UTM zone 47S", 23878: "DGN95 / UTM zone 48S", 23879: "DGN95 / UTM zone 49S", 23880: "DGN95 / 
UTM zone 50S", 23881: "DGN95 / UTM zone 51S", 23882: "DGN95 / UTM zone 52S", 23883: "DGN95 / UTM zone 53S", 23884: "DGN95 / UTM zone 54S", 23886: "ID74 / UTM zone 46S", 23887: "ID74 / UTM zone 47S", 23888: "ID74 / UTM zone 48S", 23889: "ID74 / UTM zone 49S", 23890: "ID74 / UTM zone 50S", 23891: "ID74 / UTM zone 51S", 23892: "ID74 / UTM zone 52S", 23893: "ID74 / UTM zone 53S", 23894: "ID74 / UTM zone 54S", 23946: "Indian 1954 / UTM zone 46N", 23947: "Indian 1954 / UTM zone 47N", 23948: "Indian 1954 / UTM zone 48N", 24047: "Indian 1975 / UTM zone 47N", 24048: "Indian 1975 / UTM zone 48N", 24100: "Jamaica 1875 / Jamaica (Old Grid)", 24200: "JAD69 / Jamaica National Grid", 24305: "Kalianpur 1937 / UTM zone 45N", 24306: "Kalianpur 1937 / UTM zone 46N", 24311: "Kalianpur 1962 / UTM zone 41N", 24312: "Kalianpur 1962 / UTM zone 42N", 24313: "Kalianpur 1962 / UTM zone 43N", 24342: "Kalianpur 1975 / UTM zone 42N", 24343: "Kalianpur 1975 / UTM zone 43N", 24344: "Kalianpur 1975 / UTM zone 44N", 24345: "Kalianpur 1975 / UTM zone 45N", 24346: "Kalianpur 1975 / UTM zone 46N", 24347: "Kalianpur 1975 / UTM zone 47N", 24370: "Kalianpur 1880 / India zone 0", 24371: "Kalianpur 1880 / India zone I", 24372: "Kalianpur 1880 / India zone IIa", 24373: "Kalianpur 1880 / India zone IIIa", 24374: "Kalianpur 1880 / India zone IVa", 24375: "Kalianpur 1937 / India zone IIb", 24376: "Kalianpur 1962 / India zone I", 24377: "Kalianpur 1962 / India zone IIa", 24378: "Kalianpur 1975 / India zone I", 24379: "Kalianpur 1975 / India zone IIa", 24380: "Kalianpur 1975 / India zone IIb", 24381: "Kalianpur 1975 / India zone IIIa", 24382: "Kalianpur 1880 / India zone IIb", 24383: "Kalianpur 1975 / India zone IVa", 24500: "Kertau 1968 / Singapore Grid", 24547: "Kertau 1968 / UTM zone 47N", 24548: "Kertau 1968 / UTM zone 48N", 24571: "Kertau / R.S.O. 
Malaya (ch)", 24600: "KOC Lambert", 24718: "La Canoa / UTM zone 18N", 24719: "La Canoa / UTM zone 19N", 24720: "La Canoa / UTM zone 20N", 24817: "PSAD56 / UTM zone 17N", 24818: "PSAD56 / UTM zone 18N", 24819: "PSAD56 / UTM zone 19N", 24820: "PSAD56 / UTM zone 20N", 24821: "PSAD56 / UTM zone 21N", 24877: "PSAD56 / UTM zone 17S", 24878: "PSAD56 / UTM zone 18S", 24879: "PSAD56 / UTM zone 19S", 24880: "PSAD56 / UTM zone 20S", 24881: "PSAD56 / UTM zone 21S", 24882: "PSAD56 / UTM zone 22S", 24891: "PSAD56 / Peru west zone", 24892: "PSAD56 / Peru central zone", 24893: "PSAD56 / Peru east zone", 25000: "Leigon / Ghana Metre Grid", 25231: "Lome / UTM zone 31N", 25391: "Luzon 1911 / Philippines zone I", 25392: "Luzon 1911 / Philippines zone II", 25393: "Luzon 1911 / Philippines zone III", 25394: "Luzon 1911 / Philippines zone IV", 25395: "Luzon 1911 / Philippines zone V", 25700: "Makassar (Jakarta) / NEIEZ", 25828: "ETRS89 / UTM zone 28N", 25829: "ETRS89 / UTM zone 29N", 25830: "ETRS89 / UTM zone 30N", 25831: "ETRS89 / UTM zone 31N", 25832: "ETRS89 / UTM zone 32N", 25833: "ETRS89 / UTM zone 33N", 25834: "ETRS89 / UTM zone 34N", 25835: "ETRS89 / UTM zone 35N", 25836: "ETRS89 / UTM zone 36N", 25837: "ETRS89 / UTM zone 37N", 25838: "ETRS89 / UTM zone 38N", 25884: "ETRS89 / TM Baltic93", 25932: "Malongo 1987 / UTM zone 32S", 26191: "Merchich / Nord Maroc", 26192: "Merchich / Sud Maroc", 26193: "Merchich / Sahara", 26194: "Merchich / Sahara Nord", 26195: "Merchich / Sahara Sud", 26237: "Massawa / UTM zone 37N", 26331: "Minna / UTM zone 31N", 26332: "Minna / UTM zone 32N", 26391: "Minna / Nigeria West Belt", 26392: "Minna / Nigeria Mid Belt", 26393: "Minna / Nigeria East Belt", 26432: "Mhast / UTM zone 32S", 26591: "Monte Mario (Rome) / Italy zone 1", 26592: "Monte Mario (Rome) / Italy zone 2", 26632: "M'poraloko / UTM zone 32N", 26692: "M'poraloko / UTM zone 32S", 26701: "NAD27 / UTM zone 1N", 26702: "NAD27 / UTM zone 2N", 26703: "NAD27 / UTM zone 3N", 26704: "NAD27 / UTM zone 4N", 26705: "NAD27 / UTM zone 5N", 26706: "NAD27 / UTM zone 6N", 26707: "NAD27 / UTM zone 7N", 26708: "NAD27 / UTM zone 8N", 26709: "NAD27 / UTM zone 9N", 26710: "NAD27 / UTM zone 10N", 26711: "NAD27 / UTM zone 11N", 26712: "NAD27 / UTM zone 12N", 26713: "NAD27 / UTM zone 13N", 26714: "NAD27 / UTM zone 14N", 26715: "NAD27 / UTM zone 15N", 26716: "NAD27 / UTM zone 16N", 26717: "NAD27 / UTM zone 17N", 26718: "NAD27 / UTM zone 18N", 26719: "NAD27 / UTM zone 19N", 26720: "NAD27 / UTM zone 20N", 26721: "NAD27 / UTM zone 21N", 26722: "NAD27 / UTM zone 22N", 26729: "NAD27 / Alabama East", 26730: "NAD27 / Alabama West", 26731: "NAD27 / Alaska zone 1", 26732: "NAD27 / Alaska zone 2", 26733: "NAD27 / Alaska zone 3", 26734: "NAD27 / Alaska zone 4", 26735: "NAD27 / Alaska zone 5", 26736: "NAD27 / Alaska zone 6", 26737: "NAD27 / Alaska zone 7", 26738: "NAD27 / Alaska zone 8", 26739: "NAD27 / Alaska zone 9", 26740: "NAD27 / Alaska zone 10", 26741: "NAD27 / California zone I", 26742: "NAD27 / California zone II", 26743: "NAD27 / California zone III", 26744: "NAD27 / California zone IV", 26745: "NAD27 / California zone V", 26746: "NAD27 / California zone VI", 26747: "NAD27 / California zone VII", 26748: "NAD27 / Arizona East", 26749: "NAD27 / Arizona Central", 26750: "NAD27 / Arizona West", 26751: "NAD27 / Arkansas North", 26752: "NAD27 / Arkansas South", 26753: "NAD27 / Colorado North", 26754: "NAD27 / Colorado Central", 26755: "NAD27 / Colorado South", 26756: "NAD27 / Connecticut", 26757: "NAD27 / Delaware", 26758: "NAD27 / Florida East", 26759: 
"NAD27 / Florida West", 26760: "NAD27 / Florida North", 26766: "NAD27 / Georgia East", 26767: "NAD27 / Georgia West", 26768: "NAD27 / Idaho East", 26769: "NAD27 / Idaho Central", 26770: "NAD27 / Idaho West", 26771: "NAD27 / Illinois East", 26772: "NAD27 / Illinois West", 26773: "NAD27 / Indiana East", 26774: "NAD27 / Indiana West", 26775: "NAD27 / Iowa North", 26776: "NAD27 / Iowa South", 26777: "NAD27 / Kansas North", 26778: "NAD27 / Kansas South", 26779: "NAD27 / Kentucky North", 26780: "NAD27 / Kentucky South", 26781: "NAD27 / Louisiana North", 26782: "NAD27 / Louisiana South", 26783: "NAD27 / Maine East", 26784: "NAD27 / Maine West", 26785: "NAD27 / Maryland", 26786: "NAD27 / Massachusetts Mainland", 26787: "NAD27 / Massachusetts Island", 26791: "NAD27 / Minnesota North", 26792: "NAD27 / Minnesota Central", 26793: "NAD27 / Minnesota South", 26794: "NAD27 / Mississippi East", 26795: "NAD27 / Mississippi West", 26796: "NAD27 / Missouri East", 26797: "NAD27 / Missouri Central", 26798: "NAD27 / Missouri West", 26799: "NAD27 / California zone VII", 26801: "NAD Michigan / Michigan East", 26802: "NAD Michigan / Michigan Old Central", 26803: "NAD Michigan / Michigan West", 26811: "NAD Michigan / Michigan North", 26812: "NAD Michigan / Michigan Central", 26813: "NAD Michigan / Michigan South", 26814: "NAD83 / Maine East (ftUS)", 26815: "NAD83 / Maine West (ftUS)", 26819: "NAD83 / Minnesota North (ftUS)", 26820: "NAD83 / Minnesota Central (ftUS)", 26821: "NAD83 / Minnesota South (ftUS)", 26822: "NAD83 / Nebraska (ftUS)", 26823: "NAD83 / West Virginia North (ftUS)", 26824: "NAD83 / West Virginia South (ftUS)", 26825: "NAD83(HARN) / Maine East (ftUS)", 26826: "NAD83(HARN) / Maine West (ftUS)", 26830: "NAD83(HARN) / Minnesota North (ftUS)", 26831: "NAD83(HARN) / Minnesota Central (ftUS)", 26832: "NAD83(HARN) / Minnesota South (ftUS)", 26833: "NAD83(HARN) / Nebraska (ftUS)", 26834: "NAD83(HARN) / West Virginia North (ftUS)", 26835: "NAD83(HARN) / West Virginia South (ftUS)", 26836: "NAD83(NSRS2007) / Maine East (ftUS)", 26837: "NAD83(NSRS2007) / Maine West (ftUS)", 26841: "NAD83(NSRS2007) / Minnesota North (ftUS)", 26842: "NAD83(NSRS2007) / Minnesota Central (ftUS)", 26843: "NAD83(NSRS2007) / Minnesota South (ftUS)", 26844: "NAD83(NSRS2007) / Nebraska (ftUS)", 26845: "NAD83(NSRS2007) / West Virginia North (ftUS)", 26846: "NAD83(NSRS2007) / West Virginia South (ftUS)", 26847: "NAD83 / Maine East (ftUS)", 26848: "NAD83 / Maine West (ftUS)", 26849: "NAD83 / Minnesota North (ftUS)", 26850: "NAD83 / Minnesota Central (ftUS)", 26851: "NAD83 / Minnesota South (ftUS)", 26852: "NAD83 / Nebraska (ftUS)", 26853: "NAD83 / West Virginia North (ftUS)", 26854: "NAD83 / West Virginia South (ftUS)", 26855: "NAD83(HARN) / Maine East (ftUS)", 26856: "NAD83(HARN) / Maine West (ftUS)", 26857: "NAD83(HARN) / Minnesota North (ftUS)", 26858: "NAD83(HARN) / Minnesota Central (ftUS)", 26859: "NAD83(HARN) / Minnesota South (ftUS)", 26860: "NAD83(HARN) / Nebraska (ftUS)", 26861: "NAD83(HARN) / West Virginia North (ftUS)", 26862: "NAD83(HARN) / West Virginia South (ftUS)", 26863: "NAD83(NSRS2007) / Maine East (ftUS)", 26864: "NAD83(NSRS2007) / Maine West (ftUS)", 26865: "NAD83(NSRS2007) / Minnesota North (ftUS)", 26866: "NAD83(NSRS2007) / Minnesota Central (ftUS)", 26867: "NAD83(NSRS2007) / Minnesota South (ftUS)", 26868: "NAD83(NSRS2007) / Nebraska (ftUS)", 26869: "NAD83(NSRS2007) / West Virginia North (ftUS)", 26870: "NAD83(NSRS2007) / West Virginia South (ftUS)", 26891: "NAD83(CSRS) / MTM zone 11", 26892: "NAD83(CSRS) / MTM 
zone 12", 26893: "NAD83(CSRS) / MTM zone 13", 26894: "NAD83(CSRS) / MTM zone 14", 26895: "NAD83(CSRS) / MTM zone 15", 26896: "NAD83(CSRS) / MTM zone 16", 26897: "NAD83(CSRS) / MTM zone 17", 26898: "NAD83(CSRS) / MTM zone 1", 26899: "NAD83(CSRS) / MTM zone 2", 26901: "NAD83 / UTM zone 1N", 26902: "NAD83 / UTM zone 2N", 26903: "NAD83 / UTM zone 3N", 26904: "NAD83 / UTM zone 4N", 26905: "NAD83 / UTM zone 5N", 26906: "NAD83 / UTM zone 6N", 26907: "NAD83 / UTM zone 7N", 26908: "NAD83 / UTM zone 8N", 26909: "NAD83 / UTM zone 9N", 26910: "NAD83 / UTM zone 10N", 26911: "NAD83 / UTM zone 11N", 26912: "NAD83 / UTM zone 12N", 26913: "NAD83 / UTM zone 13N", 26914: "NAD83 / UTM zone 14N", 26915: "NAD83 / UTM zone 15N", 26916: "NAD83 / UTM zone 16N", 26917: "NAD83 / UTM zone 17N", 26918: "NAD83 / UTM zone 18N", 26919: "NAD83 / UTM zone 19N", 26920: "NAD83 / UTM zone 20N", 26921: "NAD83 / UTM zone 21N", 26922: "NAD83 / UTM zone 22N", 26923: "NAD83 / UTM zone 23N", 26929: "NAD83 / Alabama East", 26930: "NAD83 / Alabama West", 26931: "NAD83 / Alaska zone 1", 26932: "NAD83 / Alaska zone 2", 26933: "NAD83 / Alaska zone 3", 26934: "NAD83 / Alaska zone 4", 26935: "NAD83 / Alaska zone 5", 26936: "NAD83 / Alaska zone 6", 26937: "NAD83 / Alaska zone 7", 26938: "NAD83 / Alaska zone 8", 26939: "NAD83 / Alaska zone 9", 26940: "NAD83 / Alaska zone 10", 26941: "NAD83 / California zone 1", 26942: "NAD83 / California zone 2", 26943: "NAD83 / California zone 3", 26944: "NAD83 / California zone 4", 26945: "NAD83 / California zone 5", 26946: "NAD83 / California zone 6", 26948: "NAD83 / Arizona East", 26949: "NAD83 / Arizona Central", 26950: "NAD83 / Arizona West", 26951: "NAD83 / Arkansas North", 26952: "NAD83 / Arkansas South", 26953: "NAD83 / Colorado North", 26954: "NAD83 / Colorado Central", 26955: "NAD83 / Colorado South", 26956: "NAD83 / Connecticut", 26957: "NAD83 / Delaware", 26958: "NAD83 / Florida East", 26959: "NAD83 / Florida West", 26960: "NAD83 / Florida North", 26961: "NAD83 / Hawaii zone 1", 26962: "NAD83 / Hawaii zone 2", 26963: "NAD83 / Hawaii zone 3", 26964: "NAD83 / Hawaii zone 4", 26965: "NAD83 / Hawaii zone 5", 26966: "NAD83 / Georgia East", 26967: "NAD83 / Georgia West", 26968: "NAD83 / Idaho East", 26969: "NAD83 / Idaho Central", 26970: "NAD83 / Idaho West", 26971: "NAD83 / Illinois East", 26972: "NAD83 / Illinois West", 26973: "NAD83 / Indiana East", 26974: "NAD83 / Indiana West", 26975: "NAD83 / Iowa North", 26976: "NAD83 / Iowa South", 26977: "NAD83 / Kansas North", 26978: "NAD83 / Kansas South", 26979: "NAD83 / Kentucky North", 26980: "NAD83 / Kentucky South", 26981: "NAD83 / Louisiana North", 26982: "NAD83 / Louisiana South", 26983: "NAD83 / Maine East", 26984: "NAD83 / Maine West", 26985: "NAD83 / Maryland", 26986: "NAD83 / Massachusetts Mainland", 26987: "NAD83 / Massachusetts Island", 26988: "NAD83 / Michigan North", 26989: "NAD83 / Michigan Central", 26990: "NAD83 / Michigan South", 26991: "NAD83 / Minnesota North", 26992: "NAD83 / Minnesota Central", 26993: "NAD83 / Minnesota South", 26994: "NAD83 / Mississippi East", 26995: "NAD83 / Mississippi West", 26996: "NAD83 / Missouri East", 26997: "NAD83 / Missouri Central", 26998: "NAD83 / Missouri West", 27037: "Nahrwan 1967 / UTM zone 37N", 27038: "Nahrwan 1967 / UTM zone 38N", 27039: "Nahrwan 1967 / UTM zone 39N", 27040: "Nahrwan 1967 / UTM zone 40N", 27120: "Naparima 1972 / UTM zone 20N", 27200: "NZGD49 / New Zealand Map Grid", 27205: "NZGD49 / Mount Eden Circuit", 27206: "NZGD49 / Bay of Plenty Circuit", 27207: "NZGD49 / Poverty Bay 
Circuit", 27208: "NZGD49 / Hawkes Bay Circuit", 27209: "NZGD49 / Taranaki Circuit", 27210: "NZGD49 / Tuhirangi Circuit", 27211: "NZGD49 / Wanganui Circuit", 27212: "NZGD49 / Wairarapa Circuit", 27213: "NZGD49 / Wellington Circuit", 27214: "NZGD49 / Collingwood Circuit", 27215: "NZGD49 / Nelson Circuit", 27216: "NZGD49 / Karamea Circuit", 27217: "NZGD49 / Buller Circuit", 27218: "NZGD49 / Grey Circuit", 27219: "NZGD49 / Amuri Circuit", 27220: "NZGD49 / Marlborough Circuit", 27221: "NZGD49 / Hokitika Circuit", 27222: "NZGD49 / Okarito Circuit", 27223: "NZGD49 / Jacksons Bay Circuit", 27224: "NZGD49 / Mount Pleasant Circuit", 27225: "NZGD49 / Gawler Circuit", 27226: "NZGD49 / Timaru Circuit", 27227: "NZGD49 / Lindis Peak Circuit", 27228: "NZGD49 / Mount Nicholas Circuit", 27229: "NZGD49 / Mount York Circuit", 27230: "NZGD49 / Observation Point Circuit", 27231: "NZGD49 / North Taieri Circuit", 27232: "NZGD49 / Bluff Circuit", 27258: "NZGD49 / UTM zone 58S", 27259: "NZGD49 / UTM zone 59S", 27260: "NZGD49 / UTM zone 60S", 27291: "NZGD49 / North Island Grid", 27292: "NZGD49 / South Island Grid", 27391: "NGO 1948 (Oslo) / NGO zone I", 27392: "NGO 1948 (Oslo) / NGO zone II", 27393: "NGO 1948 (Oslo) / NGO zone III", 27394: "NGO 1948 (Oslo) / NGO zone IV", 27395: "NGO 1948 (Oslo) / NGO zone V", 27396: "NGO 1948 (Oslo) / NGO zone VI", 27397: "NGO 1948 (Oslo) / NGO zone VII", 27398: "NGO 1948 (Oslo) / NGO zone VIII", 27429: "Datum 73 / UTM zone 29N", 27492: "Datum 73 / Modified Portuguese Grid", 27493: "Datum 73 / Modified Portuguese Grid", 27500: "ATF (Paris) / Nord de Guerre", 27561: "NTF (Paris) / Lambert Nord France", 27562: "NTF (Paris) / Lambert Centre France", 27563: "NTF (Paris) / Lambert Sud France", 27564: "NTF (Paris) / Lambert Corse", 27571: "NTF (Paris) / Lambert zone I", 27572: "NTF (Paris) / Lambert zone II", 27573: "NTF (Paris) / Lambert zone III", 27574: "NTF (Paris) / Lambert zone IV", 27581: "NTF (Paris) / France I", 27582: "NTF (Paris) / France II", 27583: "NTF (Paris) / France III", 27584: "NTF (Paris) / France IV", 27591: "NTF (Paris) / Nord France", 27592: "NTF (Paris) / Centre France", 27593: "NTF (Paris) / Sud France", 27594: "NTF (Paris) / Corse", 27700: "OSGB 1936 / British National Grid", 28191: "Palestine 1923 / Palestine Grid", 28192: "Palestine 1923 / Palestine Belt", 28193: "Palestine 1923 / Israeli CS Grid", 28232: "Pointe Noire / UTM zone 32S", 28348: "GDA94 / MGA zone 48", 28349: "GDA94 / MGA zone 49", 28350: "GDA94 / MGA zone 50", 28351: "GDA94 / MGA zone 51", 28352: "GDA94 / MGA zone 52", 28353: "GDA94 / MGA zone 53", 28354: "GDA94 / MGA zone 54", 28355: "GDA94 / MGA zone 55", 28356: "GDA94 / MGA zone 56", 28357: "GDA94 / MGA zone 57", 28358: "GDA94 / MGA zone 58", 28402: "Pulkovo 1942 / Gauss-Kruger zone 2", 28403: "Pulkovo 1942 / Gauss-Kruger zone 3", 28404: "Pulkovo 1942 / Gauss-Kruger zone 4", 28405: "Pulkovo 1942 / Gauss-Kruger zone 5", 28406: "Pulkovo 1942 / Gauss-Kruger zone 6", 28407: "Pulkovo 1942 / Gauss-Kruger zone 7", 28408: "Pulkovo 1942 / Gauss-Kruger zone 8", 28409: "Pulkovo 1942 / Gauss-Kruger zone 9", 28410: "Pulkovo 1942 / Gauss-Kruger zone 10", 28411: "Pulkovo 1942 / Gauss-Kruger zone 11", 28412: "Pulkovo 1942 / Gauss-Kruger zone 12", 28413: "Pulkovo 1942 / Gauss-Kruger zone 13", 28414: "Pulkovo 1942 / Gauss-Kruger zone 14", 28415: "Pulkovo 1942 / Gauss-Kruger zone 15", 28416: "Pulkovo 1942 / Gauss-Kruger zone 16", 28417: "Pulkovo 1942 / Gauss-Kruger zone 17", 28418: "Pulkovo 1942 / Gauss-Kruger zone 18", 28419: "Pulkovo 1942 / Gauss-Kruger zone 
19", 28420: "Pulkovo 1942 / Gauss-Kruger zone 20", 28421: "Pulkovo 1942 / Gauss-Kruger zone 21", 28422: "Pulkovo 1942 / Gauss-Kruger zone 22", 28423: "Pulkovo 1942 / Gauss-Kruger zone 23", 28424: "Pulkovo 1942 / Gauss-Kruger zone 24", 28425: "Pulkovo 1942 / Gauss-Kruger zone 25", 28426: "Pulkovo 1942 / Gauss-Kruger zone 26", 28427: "Pulkovo 1942 / Gauss-Kruger zone 27", 28428: "Pulkovo 1942 / Gauss-Kruger zone 28", 28429: "Pulkovo 1942 / Gauss-Kruger zone 29", 28430: "Pulkovo 1942 / Gauss-Kruger zone 30", 28431: "Pulkovo 1942 / Gauss-Kruger zone 31", 28432: "Pulkovo 1942 / Gauss-Kruger zone 32", 28462: "Pulkovo 1942 / Gauss-Kruger 2N", 28463: "Pulkovo 1942 / Gauss-Kruger 3N", 28464: "Pulkovo 1942 / Gauss-Kruger 4N", 28465: "Pulkovo 1942 / Gauss-Kruger 5N", 28466: "Pulkovo 1942 / Gauss-Kruger 6N", 28467: "Pulkovo 1942 / Gauss-Kruger 7N", 28468: "Pulkovo 1942 / Gauss-Kruger 8N", 28469: "Pulkovo 1942 / Gauss-Kruger 9N", 28470: "Pulkovo 1942 / Gauss-Kruger 10N", 28471: "Pulkovo 1942 / Gauss-Kruger 11N", 28472: "Pulkovo 1942 / Gauss-Kruger 12N", 28473: "Pulkovo 1942 / Gauss-Kruger 13N", 28474: "Pulkovo 1942 / Gauss-Kruger 14N", 28475: "Pulkovo 1942 / Gauss-Kruger 15N", 28476: "Pulkovo 1942 / Gauss-Kruger 16N", 28477: "Pulkovo 1942 / Gauss-Kruger 17N", 28478: "Pulkovo 1942 / Gauss-Kruger 18N", 28479: "Pulkovo 1942 / Gauss-Kruger 19N", 28480: "Pulkovo 1942 / Gauss-Kruger 20N", 28481: "Pulkovo 1942 / Gauss-Kruger 21N", 28482: "Pulkovo 1942 / Gauss-Kruger 22N", 28483: "Pulkovo 1942 / Gauss-Kruger 23N", 28484: "Pulkovo 1942 / Gauss-Kruger 24N", 28485: "Pulkovo 1942 / Gauss-Kruger 25N", 28486: "Pulkovo 1942 / Gauss-Kruger 26N", 28487: "Pulkovo 1942 / Gauss-Kruger 27N", 28488: "Pulkovo 1942 / Gauss-Kruger 28N", 28489: "Pulkovo 1942 / Gauss-Kruger 29N", 28490: "Pulkovo 1942 / Gauss-Kruger 30N", 28491: "Pulkovo 1942 / Gauss-Kruger 31N", 28492: "Pulkovo 1942 / Gauss-Kruger 32N", 28600: "Qatar 1974 / Qatar National Grid", 28991: "Amersfoort / RD Old", 28992: "Amersfoort / RD New", 29100: "SAD69 / Brazil Polyconic", 29101: "SAD69 / Brazil Polyconic", 29118: "SAD69 / UTM zone 18N", 29119: "SAD69 / UTM zone 19N", 29120: "SAD69 / UTM zone 20N", 29121: "SAD69 / UTM zone 21N", 29122: "SAD69 / UTM zone 22N", 29168: "SAD69 / UTM zone 18N", 29169: "SAD69 / UTM zone 19N", 29170: "SAD69 / UTM zone 20N", 29171: "SAD69 / UTM zone 21N", 29172: "SAD69 / UTM zone 22N", 29177: "SAD69 / UTM zone 17S", 29178: "SAD69 / UTM zone 18S", 29179: "SAD69 / UTM zone 19S", 29180: "SAD69 / UTM zone 20S", 29181: "SAD69 / UTM zone 21S", 29182: "SAD69 / UTM zone 22S", 29183: "SAD69 / UTM zone 23S", 29184: "SAD69 / UTM zone 24S", 29185: "SAD69 / UTM zone 25S", 29187: "SAD69 / UTM zone 17S", 29188: "SAD69 / UTM zone 18S", 29189: "SAD69 / UTM zone 19S", 29190: "SAD69 / UTM zone 20S", 29191: "SAD69 / UTM zone 21S", 29192: "SAD69 / UTM zone 22S", 29193: "SAD69 / UTM zone 23S", 29194: "SAD69 / UTM zone 24S", 29195: "SAD69 / UTM zone 25S", 29220: "Sapper Hill 1943 / UTM zone 20S", 29221: "Sapper Hill 1943 / UTM zone 21S", 29333: "Schwarzeck / UTM zone 33S", 29371: "Schwarzeck / Lo22/11", 29373: "Schwarzeck / Lo22/13", 29375: "Schwarzeck / Lo22/15", 29377: "Schwarzeck / Lo22/17", 29379: "Schwarzeck / Lo22/19", 29381: "Schwarzeck / Lo22/21", 29383: "Schwarzeck / Lo22/23", 29385: "Schwarzeck / Lo22/25", 29635: "Sudan / UTM zone 35N", 29636: "Sudan / UTM zone 36N", 29700: "Tananarive (Paris) / Laborde Grid", 29701: "Tananarive (Paris) / Laborde Grid", 29702: "Tananarive (Paris) / Laborde Grid approximation", 29738: "Tananarive / UTM zone 38S", 
29739: "Tananarive / UTM zone 39S", 29849: "Timbalai 1948 / UTM zone 49N", 29850: "Timbalai 1948 / UTM zone 50N", 29871: "Timbalai 1948 / RSO Borneo (ch)", 29872: "Timbalai 1948 / RSO Borneo (ft)", 29873: "Timbalai 1948 / RSO Borneo (m)", 29900: "TM65 / Irish National Grid", 29901: "OSNI 1952 / Irish National Grid", 29902: "TM65 / Irish Grid", 29903: "TM75 / Irish Grid", 30161: "Tokyo / Japan Plane Rectangular CS I", 30162: "Tokyo / Japan Plane Rectangular CS II", 30163: "Tokyo / Japan Plane Rectangular CS III", 30164: "Tokyo / Japan Plane Rectangular CS IV", 30165: "Tokyo / Japan Plane Rectangular CS V", 30166: "Tokyo / Japan Plane Rectangular CS VI", 30167: "Tokyo / Japan Plane Rectangular CS VII", 30168: "Tokyo / Japan Plane Rectangular CS VIII", 30169: "Tokyo / Japan Plane Rectangular CS IX", 30170: "Tokyo / Japan Plane Rectangular CS X", 30171: "Tokyo / Japan Plane Rectangular CS XI", 30172: "Tokyo / Japan Plane Rectangular CS XII", 30173: "Tokyo / Japan Plane Rectangular CS XIII", 30174: "Tokyo / Japan Plane Rectangular CS XIV", 30175: "Tokyo / Japan Plane Rectangular CS XV", 30176: "Tokyo / Japan Plane Rectangular CS XVI", 30177: "Tokyo / Japan Plane Rectangular CS XVII", 30178: "Tokyo / Japan Plane Rectangular CS XVIII", 30179: "Tokyo / Japan Plane Rectangular CS XIX", 30200: "Trinidad 1903 / Trinidad Grid", 30339: "TC(1948) / UTM zone 39N", 30340: "TC(1948) / UTM zone 40N", 30491: "Voirol 1875 / Nord Algerie (ancienne)", 30492: "Voirol 1875 / Sud Algerie (ancienne)", 30493: "Voirol 1879 / Nord Algerie (ancienne)", 30494: "Voirol 1879 / Sud Algerie (ancienne)", 30729: "Nord Sahara 1959 / UTM zone 29N", 30730: "Nord Sahara 1959 / UTM zone 30N", 30731: "Nord Sahara 1959 / UTM zone 31N", 30732: "Nord Sahara 1959 / UTM zone 32N", 30791: "Nord Sahara 1959 / Nord Algerie", 30792: "Nord Sahara 1959 / Sud Algerie", 30800: "RT38 2.5 gon W", 31028: "Yoff / UTM zone 28N", 31121: "Zanderij / UTM zone 21N", 31154: "Zanderij / TM 54 NW", 31170: "Zanderij / Suriname Old TM", 31171: "Zanderij / Suriname TM", 31251: "MGI (Ferro) / Austria GK West Zone", 31252: "MGI (Ferro) / Austria GK Central Zone", 31253: "MGI (Ferro) / Austria GK East Zone", 31254: "MGI / Austria GK West", 31255: "MGI / Austria GK Central", 31256: "MGI / Austria GK East", 31257: "MGI / Austria GK M28", 31258: "MGI / Austria GK M31", 31259: "MGI / Austria GK M34", 31265: "MGI / 3-degree Gauss zone 5", 31266: "MGI / 3-degree Gauss zone 6", 31267: "MGI / 3-degree Gauss zone 7", 31268: "MGI / 3-degree Gauss zone 8", 31275: "MGI / Balkans zone 5", 31276: "MGI / Balkans zone 6", 31277: "MGI / Balkans zone 7", 31278: "MGI / Balkans zone 8", 31279: "MGI / Balkans zone 8", 31281: "MGI (Ferro) / Austria West Zone", 31282: "MGI (Ferro) / Austria Central Zone", 31283: "MGI (Ferro) / Austria East Zone", 31284: "MGI / Austria M28", 31285: "MGI / Austria M31", 31286: "MGI / Austria M34", 31287: "MGI / Austria Lambert", 31288: "MGI (Ferro) / M28", 31289: "MGI (Ferro) / M31", 31290: "MGI (Ferro) / M34", 31291: "MGI (Ferro) / Austria West Zone", 31292: "MGI (Ferro) / Austria Central Zone", 31293: "MGI (Ferro) / Austria East Zone", 31294: "MGI / M28", 31295: "MGI / M31", 31296: "MGI / M34", 31297: "MGI / Austria Lambert", 31300: "Belge 1972 / Belge Lambert 72", 31370: "Belge 1972 / Belgian Lambert 72", 31461: "DHDN / 3-degree Gauss zone 1", 31462: "DHDN / 3-degree Gauss zone 2", 31463: "DHDN / 3-degree Gauss zone 3", 31464: "DHDN / 3-degree Gauss zone 4", 31465: "DHDN / 3-degree Gauss zone 5", 31466: "DHDN / 3-degree Gauss-Kruger zone 2", 31467: 
"DHDN / 3-degree Gauss-Kruger zone 3", 31468: "DHDN / 3-degree Gauss-Kruger zone 4", 31469: "DHDN / 3-degree Gauss-Kruger zone 5", 31528: "Conakry 1905 / UTM zone 28N", 31529: "Conakry 1905 / UTM zone 29N", 31600: "Dealul Piscului 1930 / Stereo 33", 31700: "Dealul Piscului 1970/ Stereo 70", 31838: "NGN / UTM zone 38N", 31839: "NGN / UTM zone 39N", 31900: "KUDAMS / KTM", 31901: "KUDAMS / KTM", 31965: "SIRGAS 2000 / UTM zone 11N", 31966: "SIRGAS 2000 / UTM zone 12N", 31967: "SIRGAS 2000 / UTM zone 13N", 31968: "SIRGAS 2000 / UTM zone 14N", 31969: "SIRGAS 2000 / UTM zone 15N", 31970: "SIRGAS 2000 / UTM zone 16N", 31971: "SIRGAS 2000 / UTM zone 17N", 31972: "SIRGAS 2000 / UTM zone 18N", 31973: "SIRGAS 2000 / UTM zone 19N", 31974: "SIRGAS 2000 / UTM zone 20N", 31975: "SIRGAS 2000 / UTM zone 21N", 31976: "SIRGAS 2000 / UTM zone 22N", 31977: "SIRGAS 2000 / UTM zone 17S", 31978: "SIRGAS 2000 / UTM zone 18S", 31979: "SIRGAS 2000 / UTM zone 19S", 31980: "SIRGAS 2000 / UTM zone 20S", 31981: "SIRGAS 2000 / UTM zone 21S", 31982: "SIRGAS 2000 / UTM zone 22S", 31983: "SIRGAS 2000 / UTM zone 23S", 31984: "SIRGAS 2000 / UTM zone 24S", 31985: "SIRGAS 2000 / UTM zone 25S", 31986: "SIRGAS 1995 / UTM zone 17N", 31987: "SIRGAS 1995 / UTM zone 18N", 31988: "SIRGAS 1995 / UTM zone 19N", 31989: "SIRGAS 1995 / UTM zone 20N", 31990: "SIRGAS 1995 / UTM zone 21N", 31991: "SIRGAS 1995 / UTM zone 22N", 31992: "SIRGAS 1995 / UTM zone 17S", 31993: "SIRGAS 1995 / UTM zone 18S", 31994: "SIRGAS 1995 / UTM zone 19S", 31995: "SIRGAS 1995 / UTM zone 20S", 31996: "SIRGAS 1995 / UTM zone 21S", 31997: "SIRGAS 1995 / UTM zone 22S", 31998: "SIRGAS 1995 / UTM zone 23S", 31999: "SIRGAS 1995 / UTM zone 24S", 32000: "SIRGAS 1995 / UTM zone 25S", 32001: "NAD27 / Montana North", 32002: "NAD27 / Montana Central", 32003: "NAD27 / Montana South", 32005: "NAD27 / Nebraska North", 32006: "NAD27 / Nebraska South", 32007: "NAD27 / Nevada East", 32008: "NAD27 / Nevada Central", 32009: "NAD27 / Nevada West", 32010: "NAD27 / New Hampshire", 32011: "NAD27 / New Jersey", 32012: "NAD27 / New Mexico East", 32013: "NAD27 / New Mexico Central", 32014: "NAD27 / New Mexico West", 32015: "NAD27 / New York East", 32016: "NAD27 / New York Central", 32017: "NAD27 / New York West", 32018: "NAD27 / New York Long Island", 32019: "NAD27 / North Carolina", 32020: "NAD27 / North Dakota North", 32021: "NAD27 / North Dakota South", 32022: "NAD27 / Ohio North", 32023: "NAD27 / Ohio South", 32024: "NAD27 / Oklahoma North", 32025: "NAD27 / Oklahoma South", 32026: "NAD27 / Oregon North", 32027: "NAD27 / Oregon South", 32028: "NAD27 / Pennsylvania North", 32029: "NAD27 / Pennsylvania South", 32030: "NAD27 / Rhode Island", 32031: "NAD27 / South Carolina North", 32033: "NAD27 / South Carolina South", 32034: "NAD27 / South Dakota North", 32035: "NAD27 / South Dakota South", 32036: "NAD27 / Tennessee", 32037: "NAD27 / Texas North", 32038: "NAD27 / Texas North Central", 32039: "NAD27 / Texas Central", 32040: "NAD27 / Texas South Central", 32041: "NAD27 / Texas South", 32042: "NAD27 / Utah North", 32043: "NAD27 / Utah Central", 32044: "NAD27 / Utah South", 32045: "NAD27 / Vermont", 32046: "NAD27 / Virginia North", 32047: "NAD27 / Virginia South", 32048: "NAD27 / Washington North", 32049: "NAD27 / Washington South", 32050: "NAD27 / West Virginia North", 32051: "NAD27 / West Virginia South", 32052: "NAD27 / Wisconsin North", 32053: "NAD27 / Wisconsin Central", 32054: "NAD27 / Wisconsin South", 32055: "NAD27 / Wyoming East", 32056: "NAD27 / Wyoming East Central", 32057: "NAD27 / 
Wyoming West Central", 32058: "NAD27 / Wyoming West", 32061: "NAD27 / Guatemala Norte", 32062: "NAD27 / Guatemala Sur", 32064: "NAD27 / BLM 14N (ftUS)", 32065: "NAD27 / BLM 15N (ftUS)", 32066: "NAD27 / BLM 16N (ftUS)", 32067: "NAD27 / BLM 17N (ftUS)", 32074: "NAD27 / BLM 14N (feet)", 32075: "NAD27 / BLM 15N (feet)", 32076: "NAD27 / BLM 16N (feet)", 32077: "NAD27 / BLM 17N (feet)", 32081: "NAD27 / MTM zone 1", 32082: "NAD27 / MTM zone 2", 32083: "NAD27 / MTM zone 3", 32084: "NAD27 / MTM zone 4", 32085: "NAD27 / MTM zone 5", 32086: "NAD27 / MTM zone 6", 32098: "NAD27 / Quebec Lambert", 32099: "NAD27 / Louisiana Offshore", 32100: "NAD83 / Montana", 32104: "NAD83 / Nebraska", 32107: "NAD83 / Nevada East", 32108: "NAD83 / Nevada Central", 32109: "NAD83 / Nevada West", 32110: "NAD83 / New Hampshire", 32111: "NAD83 / New Jersey", 32112: "NAD83 / New Mexico East", 32113: "NAD83 / New Mexico Central", 32114: "NAD83 / New Mexico West", 32115: "NAD83 / New York East", 32116: "NAD83 / New York Central", 32117: "NAD83 / New York West", 32118: "NAD83 / New York Long Island", 32119: "NAD83 / North Carolina", 32120: "NAD83 / North Dakota North", 32121: "NAD83 / North Dakota South", 32122: "NAD83 / Ohio North", 32123: "NAD83 / Ohio South", 32124: "NAD83 / Oklahoma North", 32125: "NAD83 / Oklahoma South", 32126: "NAD83 / Oregon North", 32127: "NAD83 / Oregon South", 32128: "NAD83 / Pennsylvania North", 32129: "NAD83 / Pennsylvania South", 32130: "NAD83 / Rhode Island", 32133: "NAD83 / South Carolina", 32134: "NAD83 / South Dakota North", 32135: "NAD83 / South Dakota South", 32136: "NAD83 / Tennessee", 32137: "NAD83 / Texas North", 32138: "NAD83 / Texas North Central", 32139: "NAD83 / Texas Central", 32140: "NAD83 / Texas South Central", 32141: "NAD83 / Texas South", 32142: "NAD83 / Utah North", 32143: "NAD83 / Utah Central", 32144: "NAD83 / Utah South", 32145: "NAD83 / Vermont", 32146: "NAD83 / Virginia North", 32147: "NAD83 / Virginia South", 32148: "NAD83 / Washington North", 32149: "NAD83 / Washington South", 32150: "NAD83 / West Virginia North", 32151: "NAD83 / West Virginia South", 32152: "NAD83 / Wisconsin North", 32153: "NAD83 / Wisconsin Central", 32154: "NAD83 / Wisconsin South", 32155: "NAD83 / Wyoming East", 32156: "NAD83 / Wyoming East Central", 32157: "NAD83 / Wyoming West Central", 32158: "NAD83 / Wyoming West", 32161: "NAD83 / Puerto Rico & Virgin Is.", 32164: "NAD83 / BLM 14N (ftUS)", 32165: "NAD83 / BLM 15N (ftUS)", 32166: "NAD83 / BLM 16N (ftUS)", 32167: "NAD83 / BLM 17N (ftUS)", 32180: "NAD83 / SCoPQ zone 2", 32181: "NAD83 / MTM zone 1", 32182: "NAD83 / MTM zone 2", 32183: "NAD83 / MTM zone 3", 32184: "NAD83 / MTM zone 4", 32185: "NAD83 / MTM zone 5", 32186: "NAD83 / MTM zone 6", 32187: "NAD83 / MTM zone 7", 32188: "NAD83 / MTM zone 8", 32189: "NAD83 / MTM zone 9", 32190: "NAD83 / MTM zone 10", 32191: "NAD83 / MTM zone 11", 32192: "NAD83 / MTM zone 12", 32193: "NAD83 / MTM zone 13", 32194: "NAD83 / MTM zone 14", 32195: "NAD83 / MTM zone 15", 32196: "NAD83 / MTM zone 16", 32197: "NAD83 / MTM zone 17", 32198: "NAD83 / Quebec Lambert", 32199: "NAD83 / Louisiana Offshore", 32201: "WGS 72 / UTM zone 1N", 32202: "WGS 72 / UTM zone 2N", 32203: "WGS 72 / UTM zone 3N", 32204: "WGS 72 / UTM zone 4N", 32205: "WGS 72 / UTM zone 5N", 32206: "WGS 72 / UTM zone 6N", 32207: "WGS 72 / UTM zone 7N", 32208: "WGS 72 / UTM zone 8N", 32209: "WGS 72 / UTM zone 9N", 32210: "WGS 72 / UTM zone 10N", 32211: "WGS 72 / UTM zone 11N", 32212: "WGS 72 / UTM zone 12N", 32213: "WGS 72 / UTM zone 13N", 32214: "WGS 72 / UTM 
zone 14N", 32215: "WGS 72 / UTM zone 15N", 32216: "WGS 72 / UTM zone 16N", 32217: "WGS 72 / UTM zone 17N", 32218: "WGS 72 / UTM zone 18N", 32219: "WGS 72 / UTM zone 19N", 32220: "WGS 72 / UTM zone 20N", 32221: "WGS 72 / UTM zone 21N", 32222: "WGS 72 / UTM zone 22N", 32223: "WGS 72 / UTM zone 23N", 32224: "WGS 72 / UTM zone 24N", 32225: "WGS 72 / UTM zone 25N", 32226: "WGS 72 / UTM zone 26N", 32227: "WGS 72 / UTM zone 27N", 32228: "WGS 72 / UTM zone 28N", 32229: "WGS 72 / UTM zone 29N", 32230: "WGS 72 / UTM zone 30N", 32231: "WGS 72 / UTM zone 31N", 32232: "WGS 72 / UTM zone 32N", 32233: "WGS 72 / UTM zone 33N", 32234: "WGS 72 / UTM zone 34N", 32235: "WGS 72 / UTM zone 35N", 32236: "WGS 72 / UTM zone 36N", 32237: "WGS 72 / UTM zone 37N", 32238: "WGS 72 / UTM zone 38N", 32239: "WGS 72 / UTM zone 39N", 32240: "WGS 72 / UTM zone 40N", 32241: "WGS 72 / UTM zone 41N", 32242: "WGS 72 / UTM zone 42N", 32243: "WGS 72 / UTM zone 43N", 32244: "WGS 72 / UTM zone 44N", 32245: "WGS 72 / UTM zone 45N", 32246: "WGS 72 / UTM zone 46N", 32247: "WGS 72 / UTM zone 47N", 32248: "WGS 72 / UTM zone 48N", 32249: "WGS 72 / UTM zone 49N", 32250: "WGS 72 / UTM zone 50N", 32251: "WGS 72 / UTM zone 51N", 32252: "WGS 72 / UTM zone 52N", 32253: "WGS 72 / UTM zone 53N", 32254: "WGS 72 / UTM zone 54N", 32255: "WGS 72 / UTM zone 55N", 32256: "WGS 72 / UTM zone 56N", 32257: "WGS 72 / UTM zone 57N", 32258: "WGS 72 / UTM zone 58N", 32259: "WGS 72 / UTM zone 59N", 32260: "WGS 72 / UTM zone 60N", 32301: "WGS 72 / UTM zone 1S", 32302: "WGS 72 / UTM zone 2S", 32303: "WGS 72 / UTM zone 3S", 32304: "WGS 72 / UTM zone 4S", 32305: "WGS 72 / UTM zone 5S", 32306: "WGS 72 / UTM zone 6S", 32307: "WGS 72 / UTM zone 7S", 32308: "WGS 72 / UTM zone 8S", 32309: "WGS 72 / UTM zone 9S", 32310: "WGS 72 / UTM zone 10S", 32311: "WGS 72 / UTM zone 11S", 32312: "WGS 72 / UTM zone 12S", 32313: "WGS 72 / UTM zone 13S", 32314: "WGS 72 / UTM zone 14S", 32315: "WGS 72 / UTM zone 15S", 32316: "WGS 72 / UTM zone 16S", 32317: "WGS 72 / UTM zone 17S", 32318: "WGS 72 / UTM zone 18S", 32319: "WGS 72 / UTM zone 19S", 32320: "WGS 72 / UTM zone 20S", 32321: "WGS 72 / UTM zone 21S", 32322: "WGS 72 / UTM zone 22S", 32323: "WGS 72 / UTM zone 23S", 32324: "WGS 72 / UTM zone 24S", 32325: "WGS 72 / UTM zone 25S", 32326: "WGS 72 / UTM zone 26S", 32327: "WGS 72 / UTM zone 27S", 32328: "WGS 72 / UTM zone 28S", 32329: "WGS 72 / UTM zone 29S", 32330: "WGS 72 / UTM zone 30S", 32331: "WGS 72 / UTM zone 31S", 32332: "WGS 72 / UTM zone 32S", 32333: "WGS 72 / UTM zone 33S", 32334: "WGS 72 / UTM zone 34S", 32335: "WGS 72 / UTM zone 35S", 32336: "WGS 72 / UTM zone 36S", 32337: "WGS 72 / UTM zone 37S", 32338: "WGS 72 / UTM zone 38S", 32339: "WGS 72 / UTM zone 39S", 32340: "WGS 72 / UTM zone 40S", 32341: "WGS 72 / UTM zone 41S", 32342: "WGS 72 / UTM zone 42S", 32343: "WGS 72 / UTM zone 43S", 32344: "WGS 72 / UTM zone 44S", 32345: "WGS 72 / UTM zone 45S", 32346: "WGS 72 / UTM zone 46S", 32347: "WGS 72 / UTM zone 47S", 32348: "WGS 72 / UTM zone 48S", 32349: "WGS 72 / UTM zone 49S", 32350: "WGS 72 / UTM zone 50S", 32351: "WGS 72 / UTM zone 51S", 32352: "WGS 72 / UTM zone 52S", 32353: "WGS 72 / UTM zone 53S", 32354: "WGS 72 / UTM zone 54S", 32355: "WGS 72 / UTM zone 55S", 32356: "WGS 72 / UTM zone 56S", 32357: "WGS 72 / UTM zone 57S", 32358: "WGS 72 / UTM zone 58S", 32359: "WGS 72 / UTM zone 59S", 32360: "WGS 72 / UTM zone 60S", 32401: "WGS 72BE / UTM zone 1N", 32402: "WGS 72BE / UTM zone 2N", 32403: "WGS 72BE / UTM zone 3N", 32404: "WGS 72BE / UTM zone 4N", 32405: "WGS 72BE / UTM zone 
5N", 32406: "WGS 72BE / UTM zone 6N", 32407: "WGS 72BE / UTM zone 7N", 32408: "WGS 72BE / UTM zone 8N", 32409: "WGS 72BE / UTM zone 9N", 32410: "WGS 72BE / UTM zone 10N", 32411: "WGS 72BE / UTM zone 11N", 32412: "WGS 72BE / UTM zone 12N", 32413: "WGS 72BE / UTM zone 13N", 32414: "WGS 72BE / UTM zone 14N", 32415: "WGS 72BE / UTM zone 15N", 32416: "WGS 72BE / UTM zone 16N", 32417: "WGS 72BE / UTM zone 17N", 32418: "WGS 72BE / UTM zone 18N", 32419: "WGS 72BE / UTM zone 19N", 32420: "WGS 72BE / UTM zone 20N", 32421: "WGS 72BE / UTM zone 21N", 32422: "WGS 72BE / UTM zone 22N", 32423: "WGS 72BE / UTM zone 23N", 32424: "WGS 72BE / UTM zone 24N", 32425: "WGS 72BE / UTM zone 25N", 32426: "WGS 72BE / UTM zone 26N", 32427: "WGS 72BE / UTM zone 27N", 32428: "WGS 72BE / UTM zone 28N", 32429: "WGS 72BE / UTM zone 29N", 32430: "WGS 72BE / UTM zone 30N", 32431: "WGS 72BE / UTM zone 31N", 32432: "WGS 72BE / UTM zone 32N", 32433: "WGS 72BE / UTM zone 33N", 32434: "WGS 72BE / UTM zone 34N", 32435: "WGS 72BE / UTM zone 35N", 32436: "WGS 72BE / UTM zone 36N", 32437: "WGS 72BE / UTM zone 37N", 32438: "WGS 72BE / UTM zone 38N", 32439: "WGS 72BE / UTM zone 39N", 32440: "WGS 72BE / UTM zone 40N", 32441: "WGS 72BE / UTM zone 41N", 32442: "WGS 72BE / UTM zone 42N", 32443: "WGS 72BE / UTM zone 43N", 32444: "WGS 72BE / UTM zone 44N", 32445: "WGS 72BE / UTM zone 45N", 32446: "WGS 72BE / UTM zone 46N", 32447: "WGS 72BE / UTM zone 47N", 32448: "WGS 72BE / UTM zone 48N", 32449: "WGS 72BE / UTM zone 49N", 32450: "WGS 72BE / UTM zone 50N", 32451: "WGS 72BE / UTM zone 51N", 32452: "WGS 72BE / UTM zone 52N", 32453: "WGS 72BE / UTM zone 53N", 32454: "WGS 72BE / UTM zone 54N", 32455: "WGS 72BE / UTM zone 55N", 32456: "WGS 72BE / UTM zone 56N", 32457: "WGS 72BE / UTM zone 57N", 32458: "WGS 72BE / UTM zone 58N", 32459: "WGS 72BE / UTM zone 59N", 32460: "WGS 72BE / UTM zone 60N", 32501: "WGS 72BE / UTM zone 1S", 32502: "WGS 72BE / UTM zone 2S", 32503: "WGS 72BE / UTM zone 3S", 32504: "WGS 72BE / UTM zone 4S", 32505: "WGS 72BE / UTM zone 5S", 32506: "WGS 72BE / UTM zone 6S", 32507: "WGS 72BE / UTM zone 7S", 32508: "WGS 72BE / UTM zone 8S", 32509: "WGS 72BE / UTM zone 9S", 32510: "WGS 72BE / UTM zone 10S", 32511: "WGS 72BE / UTM zone 11S", 32512: "WGS 72BE / UTM zone 12S", 32513: "WGS 72BE / UTM zone 13S", 32514: "WGS 72BE / UTM zone 14S", 32515: "WGS 72BE / UTM zone 15S", 32516: "WGS 72BE / UTM zone 16S", 32517: "WGS 72BE / UTM zone 17S", 32518: "WGS 72BE / UTM zone 18S", 32519: "WGS 72BE / UTM zone 19S", 32520: "WGS 72BE / UTM zone 20S", 32521: "WGS 72BE / UTM zone 21S", 32522: "WGS 72BE / UTM zone 22S", 32523: "WGS 72BE / UTM zone 23S", 32524: "WGS 72BE / UTM zone 24S", 32525: "WGS 72BE / UTM zone 25S", 32526: "WGS 72BE / UTM zone 26S", 32527: "WGS 72BE / UTM zone 27S", 32528: "WGS 72BE / UTM zone 28S", 32529: "WGS 72BE / UTM zone 29S", 32530: "WGS 72BE / UTM zone 30S", 32531: "WGS 72BE / UTM zone 31S", 32532: "WGS 72BE / UTM zone 32S", 32533: "WGS 72BE / UTM zone 33S", 32534: "WGS 72BE / UTM zone 34S", 32535: "WGS 72BE / UTM zone 35S", 32536: "WGS 72BE / UTM zone 36S", 32537: "WGS 72BE / UTM zone 37S", 32538: "WGS 72BE / UTM zone 38S", 32539: "WGS 72BE / UTM zone 39S", 32540: "WGS 72BE / UTM zone 40S", 32541: "WGS 72BE / UTM zone 41S", 32542: "WGS 72BE / UTM zone 42S", 32543: "WGS 72BE / UTM zone 43S", 32544: "WGS 72BE / UTM zone 44S", 32545: "WGS 72BE / UTM zone 45S", 32546: "WGS 72BE / UTM zone 46S", 32547: "WGS 72BE / UTM zone 47S", 32548: "WGS 72BE / UTM zone 48S", 32549: "WGS 72BE / UTM zone 49S", 32550: "WGS 72BE / UTM 
zone 50S", 32551: "WGS 72BE / UTM zone 51S", 32552: "WGS 72BE / UTM zone 52S", 32553: "WGS 72BE / UTM zone 53S", 32554: "WGS 72BE / UTM zone 54S", 32555: "WGS 72BE / UTM zone 55S", 32556: "WGS 72BE / UTM zone 56S", 32557: "WGS 72BE / UTM zone 57S", 32558: "WGS 72BE / UTM zone 58S", 32559: "WGS 72BE / UTM zone 59S", 32560: "WGS 72BE / UTM zone 60S", 32600: "WGS 84 / UTM grid system (northern hemisphere)", 32601: "WGS 84 / UTM zone 1N", 32602: "WGS 84 / UTM zone 2N", 32603: "WGS 84 / UTM zone 3N", 32604: "WGS 84 / UTM zone 4N", 32605: "WGS 84 / UTM zone 5N", 32606: "WGS 84 / UTM zone 6N", 32607: "WGS 84 / UTM zone 7N", 32608: "WGS 84 / UTM zone 8N", 32609: "WGS 84 / UTM zone 9N", 32610: "WGS 84 / UTM zone 10N", 32611: "WGS 84 / UTM zone 11N", 32612: "WGS 84 / UTM zone 12N", 32613: "WGS 84 / UTM zone 13N", 32614: "WGS 84 / UTM zone 14N", 32615: "WGS 84 / UTM zone 15N", 32616: "WGS 84 / UTM zone 16N", 32617: "WGS 84 / UTM zone 17N", 32618: "WGS 84 / UTM zone 18N", 32619: "WGS 84 / UTM zone 19N", 32620: "WGS 84 / UTM zone 20N", 32621: "WGS 84 / UTM zone 21N", 32622: "WGS 84 / UTM zone 22N", 32623: "WGS 84 / UTM zone 23N", 32624: "WGS 84 / UTM zone 24N", 32625: "WGS 84 / UTM zone 25N", 32626: "WGS 84 / UTM zone 26N", 32627: "WGS 84 / UTM zone 27N", 32628: "WGS 84 / UTM zone 28N", 32629: "WGS 84 / UTM zone 29N", 32630: "WGS 84 / UTM zone 30N", 32631: "WGS 84 / UTM zone 31N", 32632: "WGS 84 / UTM zone 32N", 32633: "WGS 84 / UTM zone 33N", 32634: "WGS 84 / UTM zone 34N", 32635: "WGS 84 / UTM zone 35N", 32636: "WGS 84 / UTM zone 36N", 32637: "WGS 84 / UTM zone 37N", 32638: "WGS 84 / UTM zone 38N", 32639: "WGS 84 / UTM zone 39N", 32640: "WGS 84 / UTM zone 40N", 32641: "WGS 84 / UTM zone 41N", 32642: "WGS 84 / UTM zone 42N", 32643: "WGS 84 / UTM zone 43N", 32644: "WGS 84 / UTM zone 44N", 32645: "WGS 84 / UTM zone 45N", 32646: "WGS 84 / UTM zone 46N", 32647: "WGS 84 / UTM zone 47N", 32648: "WGS 84 / UTM zone 48N", 32649: "WGS 84 / UTM zone 49N", 32650: "WGS 84 / UTM zone 50N", 32651: "WGS 84 / UTM zone 51N", 32652: "WGS 84 / UTM zone 52N", 32653: "WGS 84 / UTM zone 53N", 32654: "WGS 84 / UTM zone 54N", 32655: "WGS 84 / UTM zone 55N", 32656: "WGS 84 / UTM zone 56N", 32657: "WGS 84 / UTM zone 57N", 32658: "WGS 84 / UTM zone 58N", 32659: "WGS 84 / UTM zone 59N", 32660: "WGS 84 / UTM zone 60N", 32661: "WGS 84 / UPS North (N,E)", 32662: "WGS 84 / Plate Carree", 32663: "WGS 84 / World Equidistant Cylindrical", 32664: "WGS 84 / BLM 14N (ftUS)", 32665: "WGS 84 / BLM 15N (ftUS)", 32666: "WGS 84 / BLM 16N (ftUS)", 32667: "WGS 84 / BLM 17N (ftUS)", 32700: "WGS 84 / UTM grid system (southern hemisphere)", 32701: "WGS 84 / UTM zone 1S", 32702: "WGS 84 / UTM zone 2S", 32703: "WGS 84 / UTM zone 3S", 32704: "WGS 84 / UTM zone 4S", 32705: "WGS 84 / UTM zone 5S", 32706: "WGS 84 / UTM zone 6S", 32707: "WGS 84 / UTM zone 7S", 32708: "WGS 84 / UTM zone 8S", 32709: "WGS 84 / UTM zone 9S", 32710: "WGS 84 / UTM zone 10S", 32711: "WGS 84 / UTM zone 11S", 32712: "WGS 84 / UTM zone 12S", 32713: "WGS 84 / UTM zone 13S", 32714: "WGS 84 / UTM zone 14S", 32715: "WGS 84 / UTM zone 15S", 32716: "WGS 84 / UTM zone 16S", 32717: "WGS 84 / UTM zone 17S", 32718: "WGS 84 / UTM zone 18S", 32719: "WGS 84 / UTM zone 19S", 32720: "WGS 84 / UTM zone 20S", 32721: "WGS 84 / UTM zone 21S", 32722: "WGS 84 / UTM zone 22S", 32723: "WGS 84 / UTM zone 23S", 32724: "WGS 84 / UTM zone 24S", 32725: "WGS 84 / UTM zone 25S", 32726: "WGS 84 / UTM zone 26S", 32727: "WGS 84 / UTM zone 27S", 32728: "WGS 84 / UTM zone 28S", 32729: "WGS 84 / UTM zone 29S", 32730: 
"WGS 84 / UTM zone 30S", 32731: "WGS 84 / UTM zone 31S", 32732: "WGS 84 / UTM zone 32S", 32733: "WGS 84 / UTM zone 33S", 32734: "WGS 84 / UTM zone 34S", 32735: "WGS 84 / UTM zone 35S", 32736: "WGS 84 / UTM zone 36S", 32737: "WGS 84 / UTM zone 37S", 32738: "WGS 84 / UTM zone 38S", 32739: "WGS 84 / UTM zone 39S", 32740: "WGS 84 / UTM zone 40S", 32741: "WGS 84 / UTM zone 41S", 32742: "WGS 84 / UTM zone 42S", 32743: "WGS 84 / UTM zone 43S", 32744: "WGS 84 / UTM zone 44S", 32745: "WGS 84 / UTM zone 45S", 32746: "WGS 84 / UTM zone 46S", 32747: "WGS 84 / UTM zone 47S", 32748: "WGS 84 / UTM zone 48S", 32749: "WGS 84 / UTM zone 49S", 32750: "WGS 84 / UTM zone 50S", 32751: "WGS 84 / UTM zone 51S", 32752: "WGS 84 / UTM zone 52S", 32753: "WGS 84 / UTM zone 53S", 32754: "WGS 84 / UTM zone 54S", 32755: "WGS 84 / UTM zone 55S", 32756: "WGS 84 / UTM zone 56S", 32757: "WGS 84 / UTM zone 57S", 32758: "WGS 84 / UTM zone 58S", 32759: "WGS 84 / UTM zone 59S", 32760: "WGS 84 / UTM zone 60S", 32761: "WGS 84 / UPS South (N,E)", 32766: "WGS 84 / TM 36 SE", 32767: "User-defined" } ProjectionGeoKey = { 10101: "Proj_Alabama_CS27_East", 10102: "Proj_Alabama_CS27_West", 10131: "Proj_Alabama_CS83_East", 10132: "Proj_Alabama_CS83_West", 10201: "Proj_Arizona_Coordinate_System_east", 10202: "Proj_Arizona_Coordinate_System_Central", 10203: "Proj_Arizona_Coordinate_System_west", 10231: "Proj_Arizona_CS83_east", 10232: "Proj_Arizona_CS83_Central", 10233: "Proj_Arizona_CS83_west", 10301: "Proj_Arkansas_CS27_North", 10302: "Proj_Arkansas_CS27_South", 10331: "Proj_Arkansas_CS83_North", 10332: "Proj_Arkansas_CS83_South", 10401: "Proj_California_CS27_I", 10402: "Proj_California_CS27_II", 10403: "Proj_California_CS27_III", 10404: "Proj_California_CS27_IV", 10405: "Proj_California_CS27_V", 10406: "Proj_California_CS27_VI", 10407: "Proj_California_CS27_VII", 10431: "Proj_California_CS83_1", 10432: "Proj_California_CS83_2", 10433: "Proj_California_CS83_3", 10434: "Proj_California_CS83_4", 10435: "Proj_California_CS83_5", 10436: "Proj_California_CS83_6", 10501: "Proj_Colorado_CS27_North", 10502: "Proj_Colorado_CS27_Central", 10503: "Proj_Colorado_CS27_South", 10531: "Proj_Colorado_CS83_North", 10532: "Proj_Colorado_CS83_Central", 10533: "Proj_Colorado_CS83_South", 10600: "Proj_Connecticut_CS27", 10630: "Proj_Connecticut_CS83", 10700: "Proj_Delaware_CS27", 10730: "Proj_Delaware_CS83", 10901: "Proj_Florida_CS27_East", 10902: "Proj_Florida_CS27_West", 10903: "Proj_Florida_CS27_North", 10931: "Proj_Florida_CS83_East", 10932: "Proj_Florida_CS83_West", 10933: "Proj_Florida_CS83_North", 11001: "Proj_Georgia_CS27_East", 11002: "Proj_Georgia_CS27_West", 11031: "Proj_Georgia_CS83_East", 11032: "Proj_Georgia_CS83_West", 11101: "Proj_Idaho_CS27_East", 11102: "Proj_Idaho_CS27_Central", 11103: "Proj_Idaho_CS27_West", 11131: "Proj_Idaho_CS83_East", 11132: "Proj_Idaho_CS83_Central", 11133: "Proj_Idaho_CS83_West", 11201: "Proj_Illinois_CS27_East", 11202: "Proj_Illinois_CS27_West", 11231: "Proj_Illinois_CS83_East", 11232: "Proj_Illinois_CS83_West", 11301: "Proj_Indiana_CS27_East", 11302: "Proj_Indiana_CS27_West", 11331: "Proj_Indiana_CS83_East", 11332: "Proj_Indiana_CS83_West", 11401: "Proj_Iowa_CS27_North", 11402: "Proj_Iowa_CS27_South", 11431: "Proj_Iowa_CS83_North", 11432: "Proj_Iowa_CS83_South", 11501: "Proj_Kansas_CS27_North", 11502: "Proj_Kansas_CS27_South", 11531: "Proj_Kansas_CS83_North", 11532: "Proj_Kansas_CS83_South", 11601: "Proj_Kentucky_CS27_North", 11602: "Proj_Kentucky_CS27_South", 11631: "Proj_Kentucky_CS83_North", 11632: 
"Proj_Kentucky_CS83_South", 11701: "Proj_Louisiana_CS27_North", 11702: "Proj_Louisiana_CS27_South", 11731: "Proj_Louisiana_CS83_North", 11732: "Proj_Louisiana_CS83_South", 11801: "Proj_Maine_CS27_East", 11802: "Proj_Maine_CS27_West", 11831: "Proj_Maine_CS83_East", 11832: "Proj_Maine_CS83_West", 11900: "Proj_Maryland_CS27", 11930: "Proj_Maryland_CS83", 12001: "Proj_Massachusetts_CS27_Mainland", 12002: "Proj_Massachusetts_CS27_Island", 12031: "Proj_Massachusetts_CS83_Mainland", 12032: "Proj_Massachusetts_CS83_Island", 12101: "Proj_Michigan_State_Plane_East", 12102: "Proj_Michigan_State_Plane_Old_Central", 12103: "Proj_Michigan_State_Plane_West", 12111: "Proj_Michigan_CS27_North", 12112: "Proj_Michigan_CS27_Central", 12113: "Proj_Michigan_CS27_South", 12141: "Proj_Michigan_CS83_North", 12142: "Proj_Michigan_CS83_Central", 12143: "Proj_Michigan_CS83_South", 12201: "Proj_Minnesota_CS27_North", 12202: "Proj_Minnesota_CS27_Central", 12203: "Proj_Minnesota_CS27_South", 12231: "Proj_Minnesota_CS83_North", 12232: "Proj_Minnesota_CS83_Central", 12233: "Proj_Minnesota_CS83_South", 12301: "Proj_Mississippi_CS27_East", 12302: "Proj_Mississippi_CS27_West", 12331: "Proj_Mississippi_CS83_East", 12332: "Proj_Mississippi_CS83_West", 12401: "Proj_Missouri_CS27_East", 12402: "Proj_Missouri_CS27_Central", 12403: "Proj_Missouri_CS27_West", 12431: "Proj_Missouri_CS83_East", 12432: "Proj_Missouri_CS83_Central", 12433: "Proj_Missouri_CS83_West", 12501: "Proj_Montana_CS27_North", 12502: "Proj_Montana_CS27_Central", 12503: "Proj_Montana_CS27_South", 12530: "Proj_Montana_CS83", 12601: "Proj_Nebraska_CS27_North", 12602: "Proj_Nebraska_CS27_South", 12630: "Proj_Nebraska_CS83", 12701: "Proj_Nevada_CS27_East", 12702: "Proj_Nevada_CS27_Central", 12703: "Proj_Nevada_CS27_West", 12731: "Proj_Nevada_CS83_East", 12732: "Proj_Nevada_CS83_Central", 12733: "Proj_Nevada_CS83_West", 12800: "Proj_New_Hampshire_CS27", 12830: "Proj_New_Hampshire_CS83", 12900: "Proj_New_Jersey_CS27", 12930: "Proj_New_Jersey_CS83", 13001: "Proj_New_Mexico_CS27_East", 13002: "Proj_New_Mexico_CS27_Central", 13003: "Proj_New_Mexico_CS27_West", 13031: "Proj_New_Mexico_CS83_East", 13032: "Proj_New_Mexico_CS83_Central", 13033: "Proj_New_Mexico_CS83_West", 13101: "Proj_New_York_CS27_East", 13102: "Proj_New_York_CS27_Central", 13103: "Proj_New_York_CS27_West", 13104: "Proj_New_York_CS27_Long_Island", 13131: "Proj_New_York_CS83_East", 13132: "Proj_New_York_CS83_Central", 13133: "Proj_New_York_CS83_West", 13134: "Proj_New_York_CS83_Long_Island", 13200: "Proj_North_Carolina_CS27", 13230: "Proj_North_Carolina_CS83", 13301: "Proj_North_Dakota_CS27_North", 13302: "Proj_North_Dakota_CS27_South", 13331: "Proj_North_Dakota_CS83_North", 13332: "Proj_North_Dakota_CS83_South", 13401: "Proj_Ohio_CS27_North", 13402: "Proj_Ohio_CS27_South", 13431: "Proj_Ohio_CS83_North", 13432: "Proj_Ohio_CS83_South", 13501: "Proj_Oklahoma_CS27_North", 13502: "Proj_Oklahoma_CS27_South", 13531: "Proj_Oklahoma_CS83_North", 13532: "Proj_Oklahoma_CS83_South", 13601: "Proj_Oregon_CS27_North", 13602: "Proj_Oregon_CS27_South", 13631: "Proj_Oregon_CS83_North", 13632: "Proj_Oregon_CS83_South", 13701: "Proj_Pennsylvania_CS27_North", 13702: "Proj_Pennsylvania_CS27_South", 13731: "Proj_Pennsylvania_CS83_North", 13732: "Proj_Pennsylvania_CS83_South", 13800: "Proj_Rhode_Island_CS27", 13830: "Proj_Rhode_Island_CS83", 13901: "Proj_South_Carolina_CS27_North", 13902: "Proj_South_Carolina_CS27_South", 13930: "Proj_South_Carolina_CS83", 14001: "Proj_South_Dakota_CS27_North", 14002: 
"Proj_South_Dakota_CS27_South", 14031: "Proj_South_Dakota_CS83_North", 14032: "Proj_South_Dakota_CS83_South", 14100: "Proj_Tennessee_CS27", 14130: "Proj_Tennessee_CS83", 14201: "Proj_Texas_CS27_North", 14202: "Proj_Texas_CS27_North_Central", 14203: "Proj_Texas_CS27_Central", 14204: "Proj_Texas_CS27_South_Central", 14205: "Proj_Texas_CS27_South", 14231: "Proj_Texas_CS83_North", 14232: "Proj_Texas_CS83_North_Central", 14233: "Proj_Texas_CS83_Central", 14234: "Proj_Texas_CS83_South_Central", 14235: "Proj_Texas_CS83_South", 14301: "Proj_Utah_CS27_North", 14302: "Proj_Utah_CS27_Central", 14303: "Proj_Utah_CS27_South", 14331: "Proj_Utah_CS83_North", 14332: "Proj_Utah_CS83_Central", 14333: "Proj_Utah_CS83_South", 14400: "Proj_Vermont_CS27", 14430: "Proj_Vermont_CS83", 14501: "Proj_Virginia_CS27_North", 14502: "Proj_Virginia_CS27_South", 14531: "Proj_Virginia_CS83_North", 14532: "Proj_Virginia_CS83_South", 14601: "Proj_Washington_CS27_North", 14602: "Proj_Washington_CS27_South", 14631: "Proj_Washington_CS83_North", 14632: "Proj_Washington_CS83_South", 14701: "Proj_West_Virginia_CS27_North", 14702: "Proj_West_Virginia_CS27_South", 14731: "Proj_West_Virginia_CS83_North", 14732: "Proj_West_Virginia_CS83_South", 14801: "Proj_Wisconsin_CS27_North", 14802: "Proj_Wisconsin_CS27_Central", 14803: "Proj_Wisconsin_CS27_South", 14831: "Proj_Wisconsin_CS83_North", 14832: "Proj_Wisconsin_CS83_Central", 14833: "Proj_Wisconsin_CS83_South", 14901: "Proj_Wyoming_CS27_East", 14902: "Proj_Wyoming_CS27_East_Central", 14903: "Proj_Wyoming_CS27_West_Central", 14904: "Proj_Wyoming_CS27_West", 14931: "Proj_Wyoming_CS83_East", 14932: "Proj_Wyoming_CS83_East_Central", 14933: "Proj_Wyoming_CS83_West_Central", 14934: "Proj_Wyoming_CS83_West", 15001: "Proj_Alaska_CS27_1", 15002: "Proj_Alaska_CS27_2", 15003: "Proj_Alaska_CS27_3", 15004: "Proj_Alaska_CS27_4", 15005: "Proj_Alaska_CS27_5", 15006: "Proj_Alaska_CS27_6", 15007: "Proj_Alaska_CS27_7", 15008: "Proj_Alaska_CS27_8", 15009: "Proj_Alaska_CS27_9", 15010: "Proj_Alaska_CS27_10", 15031: "Proj_Alaska_CS83_1", 15032: "Proj_Alaska_CS83_2", 15033: "Proj_Alaska_CS83_3", 15034: "Proj_Alaska_CS83_4", 15035: "Proj_Alaska_CS83_5", 15036: "Proj_Alaska_CS83_6", 15037: "Proj_Alaska_CS83_7", 15038: "Proj_Alaska_CS83_8", 15039: "Proj_Alaska_CS83_9", 15040: "Proj_Alaska_CS83_10", 15101: "Proj_Hawaii_CS27_1", 15102: "Proj_Hawaii_CS27_2", 15103: "Proj_Hawaii_CS27_3", 15104: "Proj_Hawaii_CS27_4", 15105: "Proj_Hawaii_CS27_5", 15131: "Proj_Hawaii_CS83_1", 15132: "Proj_Hawaii_CS83_2", 15133: "Proj_Hawaii_CS83_3", 15134: "Proj_Hawaii_CS83_4", 15135: "Proj_Hawaii_CS83_5", 15201: "Proj_Puerto_Rico_CS27", 15202: "Proj_St_Croix", 15230: "Proj_Puerto_Rico_Virgin_Is", 15914: "Proj_BLM_14N_feet", 15915: "Proj_BLM_15N_feet", 15916: "Proj_BLM_16N_feet", 15917: "Proj_BLM_17N_feet", 17348: "Proj_Map_Grid_of_Australia_48", 17349: "Proj_Map_Grid_of_Australia_49", 17350: "Proj_Map_Grid_of_Australia_50", 17351: "Proj_Map_Grid_of_Australia_51", 17352: "Proj_Map_Grid_of_Australia_52", 17353: "Proj_Map_Grid_of_Australia_53", 17354: "Proj_Map_Grid_of_Australia_54", 17355: "Proj_Map_Grid_of_Australia_55", 17356: "Proj_Map_Grid_of_Australia_56", 17357: "Proj_Map_Grid_of_Australia_57", 17358: "Proj_Map_Grid_of_Australia_58", 17448: "Proj_Australian_Map_Grid_48", 17449: "Proj_Australian_Map_Grid_49", 17450: "Proj_Australian_Map_Grid_50", 17451: "Proj_Australian_Map_Grid_51", 17452: "Proj_Australian_Map_Grid_52", 17453: "Proj_Australian_Map_Grid_53", 17454: "Proj_Australian_Map_Grid_54", 17455: 
"Proj_Australian_Map_Grid_55", 17456: "Proj_Australian_Map_Grid_56", 17457: "Proj_Australian_Map_Grid_57", 17458: "Proj_Australian_Map_Grid_58", 18031: "Proj_Argentina_1", 18032: "Proj_Argentina_2", 18033: "Proj_Argentina_3", 18034: "Proj_Argentina_4", 18035: "Proj_Argentina_5", 18036: "Proj_Argentina_6", 18037: "Proj_Argentina_7", 18051: "Proj_Colombia_3W", 18052: "Proj_Colombia_Bogota", 18053: "Proj_Colombia_3E", 18054: "Proj_Colombia_6E", 18072: "Proj_Egypt_Red_Belt", 18073: "Proj_Egypt_Purple_Belt", 18074: "Proj_Extended_Purple_Belt", 18141: "Proj_New_Zealand_North_Island_Nat_Grid", 18142: "Proj_New_Zealand_South_Island_Nat_Grid", 19900: "Proj_Bahrain_Grid", 19905: "Proj_Netherlands_E_Indies_Equatorial", 19912: "Proj_RSO_Borneo", 32767: "User-defined" } InteropIndex = { b'R03': "R03 - DCF option file (Adobe RGB)", b'R98': "R98 - DCF basic file (sRGB)", b'THM': "THM - DCF thumbnail file", b'R03\x00': "R03 - DCF option file (Adobe RGB)", b'R98\x00': "R98 - DCF basic file (sRGB)", b'THM\x00': "THM - DCF thumbnail file" }
from django.conf.urls import url
from django.contrib.auth.decorators import login_required

from swingtime import views
from swingtime.models import BookingLocation

urlpatterns = [
    url(
        r'^$',
        login_required(
            views.CalendarList_View.as_view(
                template_name='swingtime/choose_location.html')),
        name='swingtime-choose-location',
    ),
    url(
        r'^calendar-feeds',
        login_required(
            views.CalendarList_View.as_view(
                template_name='swingtime/location_feeds.html')),
        name='swingtime-calendar-feeds',
    ),

    url(r'^(?P<calendar_slug>[\w_-]+)/calendar/$',
        views.today_view,
        name='swingtime-today'),

    url(r'^(?P<calendar_slug>[\w_-]+)/$',
        views.current_month_view,
        name='swingtime-current-month'),

    url(r'^(?P<calendar_slug>[\w_-]+)/current-year$',
        views.current_year_view,
        name='swingtime-current-year'),

    url(r'^(?P<calendar_slug>[\w_-]+)/(?P<year>\d{4})/$',
        views.year_view,
        name='swingtime-yearly-view'),

    url(r'^(?P<calendar_slug>[\w_-]+)/(?P<year>\d{4})/(?P<month>0?[1-9]|1[012])/$',
        views.month_view,
        name='swingtime-monthly-view'),

    url(r'^(?P<calendar_slug>[\w_-]+)/(?P<year>\d{4})/(?P<month>0?[1-9]|1[012])/(?P<day>[0-3]?\d)/$',
        views.day_view,
        name='swingtime-daily-view'),

    url(r'^(?P<calendar_slug>[\w_-]+)/events/$',
        views.event_listing,
        name='swingtime-events'),

    url(r'^(?P<calendar_slug>[\w_-]+)/events/add/$',
        views.add_event,
        name='swingtime-add-event'),

    url(r'^(?P<calendar_slug>[\w_-]+)/events/(?P<event_pk>\d+)/$',
        views.event_view,
        name='swingtime-event'),

    url(
        r'^(?P<calendar_slug>[\w_-]+)/events/(?P<event_pk>\d+)/(?P<occurrence_pk>\d+)/$',
        views.occurrence_view,
        name='swingtime-occurrence',
    ),
    url(
        r'^(?P<calendar_slug>[\w_-]+)/events/(?P<event_pk>\d+)/(?P<occurrence_pk>\d+)/delete/$',
        views.occurrence_delete,
        name='swingtime-occurrence-delete',
    ),

    ## TODO: update these to calendar_slug
    url(
        r'^webcal/(?P<room_slug>[\w_-]+)$',
        views.webcal,
        name='swingtime-webcal',
    ),
    url(
        r'^print-calendar/(?P<room_slug>[\w_-]+)/(?P<year>\d{4})/(?P<month>0?[1-9]|1[012])/$',
        views.print_month,
        name='swingtime-month-print',
    ),
    url(
        r'^print-calendar-source/(?P<room_slug>[\w_-]+)/(?P<year>\d{4})/(?P<month>0?[1-9]|1[012])/$',
        views.print_month_source,
        name='swingtime-month-print-source',
    ),
]
# Copyright 2014 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
import traceback

from xml.etree.ElementTree import ElementTree

from ament_tools.helper import argparse_existing_dir


def prepare_arguments(parser, args):
    """
    Add parameters to argparse for the test_results verb.

    :param parser: ArgumentParser object to which arguments are added
    :type parser: :py:class:`argparse.ArgumentParser`
    :param list args: list of arguments as str's
    :returns: modified version of the original parser given
    :rtype: :py:class:`argparse.ArgumentParser`
    """
    # Add verb arguments
    parser.add_argument(
        'basepath',
        nargs='?',
        type=argparse_existing_dir,
        default=os.curdir,
        help="Base path to start crawling for test results (default '.')",
    )
    parser.add_argument(
        '--verbose',
        action='store_true',
        default=False,
        help='Show all test result files (even without errors / failures)',
    )
    return parser


def main(opts):
    # use PWD in order to work when being invoked in a symlinked location
    cwd = os.getenv('PWD', os.curdir)
    opts.basepath = os.path.abspath(os.path.join(cwd, opts.basepath))

    # verify that workspace folder exists
    if not os.path.exists(opts.basepath):
        raise RuntimeError("The specified base path '%s' does not exist" % opts.basepath)

    try:
        results = collect_test_results(opts.basepath, verbose=opts.verbose)
        _, sum_errors, sum_failures, sum_skipped = aggregate_results(results)
        print_summary(results, show_stable=opts.verbose)
        if sum_errors or sum_failures:
            return 1
    except Exception as e:
        print(', '.join([line.strip() for line in
                         traceback.format_exception_only(type(e), e)]),
              file=sys.stderr)
        return 2


def collect_test_results(test_results_dir, verbose=False):
    """
    Collect test results by parsing all xml files in a given path.

    Each file is interpreted as a JUnit result file.

    :param test_results_dir: str foldername
    :returns: dict {rel_path: (num_tests, num_errors, num_failures, num_skipped)}
    """
    results = {}
    for dirpath, dirnames, filenames in os.walk(test_results_dir):
        # do not recurse into folders starting with a dot
        dirnames[:] = [d for d in dirnames if not d.startswith('.')]
        for filename in [f for f in filenames if f.endswith('.xml')]:
            filename_abs = os.path.join(dirpath, filename)
            name = filename_abs[len(test_results_dir) + 1:]
            try:
                num_tests, num_errors, num_failures, num_skipped = read_junit(filename_abs)
            except TypeError as e:
                if verbose:
                    print("Skipping '%s': %s" % (name, str(e)), file=sys.stderr)
                continue
            except Exception as e:
                print("Skipping '%s': %s" %
                      (name, ', '.join([line.strip() for line in
                                        traceback.format_exception_only(type(e), e)])),
                      file=sys.stderr)
                continue
            results[name] = (num_tests, num_errors, num_failures, num_skipped)
    return results


def read_junit(filename):
    """
    Parse xml file expected to follow junit/gtest conventions.

    See http://code.google.com/p/googletest/wiki/AdvancedGuide#Generating_an_XML_Report  # noqa

    :param filename: str junit xml file name
    :returns: tuple (num_tests, num_errors, num_failures, num_skipped)
    :raises IOError: if filename does not exist
    :raises ParseError: if xml is not well-formed
    :raises TypeError: if the root node is not named 'testsuite' or 'testsuites'
    """
    tree = ElementTree()
    root = tree.parse(filename)
    if root.tag not in ['testsuite', 'testsuites']:
        raise TypeError(
            'does not seem to be a JUnit result file '
            "(does not have a 'testsuite' or 'testsuites' root tag)")
    num_tests = int(root.attrib['tests'])
    num_errors = int(root.attrib.get('errors', 0))
    num_failures = int(root.attrib['failures'])
    num_skipped = int(root.attrib.get('skip', 0))
    num_skipped += int(root.attrib.get('disabled', 0))
    return (num_tests, num_errors, num_failures, num_skipped)


def aggregate_results(results, callback_per_result=None):
    """
    Aggregate results.

    :param results: dict as from collect_test_results()
    :returns: tuple (num_tests, num_errors, num_failures, num_skipped)
    """
    sum_tests = sum_errors = sum_failures = sum_skipped = 0
    for name in sorted(results.keys()):
        (num_tests, num_errors, num_failures, num_skipped) = results[name]
        sum_tests += num_tests
        sum_errors += num_errors
        sum_failures += num_failures
        sum_skipped += num_skipped
        if callback_per_result:
            callback_per_result(name, num_tests, num_errors, num_failures, num_skipped)
    return sum_tests, sum_errors, sum_failures, sum_skipped


def print_summary(results, show_stable=False, show_unstable=True):
    """
    Print summary to stdout.

    :param results: dict as from collect_test_results()
    :param show_stable: print tests without errors or failures
    :param show_unstable: print tests with errors or failures
    """
    def callback(name, num_tests, num_errors, num_failures, num_skipped):
        if show_stable and not (num_errors or num_failures):
            print('%s: %d tests, %d skipped' % (name, num_tests, num_skipped))
        if show_unstable and (num_errors or num_failures):
            print('%s: %d tests, %d skipped, %d errors, %d failures' %
                  (name, num_tests, num_skipped, num_errors, num_failures))

    sum_tests, sum_errors, sum_failures, sum_skipped = aggregate_results(results, callback)
    print('Summary: %d tests, %d errors, %d failures, %d skipped' %
          (sum_tests, sum_errors, sum_failures, sum_skipped))
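# A minimal usage sketch for the helpers above; 'build/test_results' is a
# hypothetical directory of JUnit XML files, not a path from this project.
if __name__ == '__main__':
    results = collect_test_results('build/test_results', verbose=True)
    print_summary(results, show_stable=True)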
import abc
import json
import re
import time
import uuid
import plpy
from datetime import datetime
from contextlib import contextmanager
from urlparse import urlparse


@contextmanager
def metrics(function, service_config, logger=None, params=None):
    try:
        start_time = time.time()
        yield
    finally:
        end_time = time.time()
        MetricsDataGatherer.add('uuid', str(uuid.uuid1()))
        MetricsDataGatherer.add('function_name', function)
        MetricsDataGatherer.add('function_params', params)
        MetricsDataGatherer.add('function_execution_time', (end_time - start_time))
        metrics_logger = MetricsServiceLoggerFactory.build(service_config, logger)
        if metrics_logger:
            data = MetricsDataGatherer.get()
            metrics_logger.log(data)
            MetricsDataGatherer.clean()


class Traceable:
    """
    Module to add metrics traceability, for example to get the response
    object in order to add it to the metrics dump
    """

    def add_response_data(self, response, logger=None):
        try:
            response_data = {}
            response_data['type'] = "request"
            response_data['date'] = datetime.now().isoformat()
            response_data['elapsed_time'] = response.elapsed.total_seconds()
            response_data['code'] = response.status_code
            response_data['message'] = response.reason
            response_data['url'] = self._parse_response_url(response.url)
            stored_data = MetricsDataGatherer.get_element('response')
            if stored_data:
                stored_data.append(response_data)
            else:
                MetricsDataGatherer.add('response', [response_data])
        except BaseException as e:
            # We don't want to stop the job for some error processing the response
            if logger:
                logger.error("Error trying to process response metric data",
                             exception=e)

    def _parse_response_url(self, url):
        u = urlparse(url)
        return "{0}://{1}{2}".format(u.scheme, u.netloc, u.path)


class MetricsDataGatherer:
    """
    Metrics gatherer used as a singleton. The intent is to use it as a global
    storage for the metrics along the function request.
    """

    class __MetricsDataGatherer:
        def __init__(self):
            self.data = {}

        def add(self, key, value):
            self.data[key] = value

        def get(self):
            return self.data

        def get_element(self, key):
            return self.data.get(key, None)

        def clean(self):
            self.data = {}

    # We use pgbouncer so we need to have multiple instances per request id
    __instance = {}

    @classmethod
    def add(cls, key, value):
        MetricsDataGatherer.instance().add(key, value)

    @classmethod
    def get(cls):
        return MetricsDataGatherer.instance().get()

    @classmethod
    def get_element(cls, key):
        return MetricsDataGatherer.instance().get_element(key)

    @classmethod
    def clean(cls):
        MetricsDataGatherer.instance().clean()

    @classmethod
    def instance(cls):
        txid = MetricsDataGatherer._get_txid()
        if txid not in MetricsDataGatherer.__instance:
            MetricsDataGatherer.__instance[txid] = MetricsDataGatherer.__MetricsDataGatherer()
        return MetricsDataGatherer.__instance[txid]

    @classmethod
    def _get_txid(cls):
        result = plpy.execute('select txid_current() as txid')
        return result[0]['txid']


class MetricsServiceLoggerFactory:

    @classmethod
    def build(cls, service_config, logger=None):
        if re.search('^geocoder_*', service_config.service_type):
            return MetricsGeocoderLogger(service_config, logger)
        elif re.search('^routing_*', service_config.service_type):
            return MetricsGenericLogger(service_config, logger)
        elif re.search('_isolines$', service_config.service_type):
            return MetricsIsolinesLogger(service_config, logger)
        elif re.search('^obs_*', service_config.service_type):
            return MetricsGenericLogger(service_config, logger)
        else:
            return None


class MetricsLogger(object):
    __metaclass__ = abc.ABCMeta

    def __init__(self, service_config, logger):
        self._service_config = service_config
        self._logger = logger

    def dump_to_file(self, data):
        log_path = None
        try:
            log_path = self.service_config.metrics_log_path
            response_data = data.pop('response', [])
            uuid = data.get('uuid')
            if log_path:
                with open(log_path, 'a') as logfile:
                    self._dump_response_to_file(uuid, response_data, logfile)
                    json.dump(data, logfile)
                    logfile.write('\n')
        except BaseException as e:
            self._logger.error("Error dumping metrics to file {0}".format(log_path),
                               exception=e)

    def collect_data(self, data):
        return {
            "uuid": data.get('uuid', uuid.uuid1()),
            "type": 'function',
            "function_name": data.get('function_name', None),
            "function_params": data.get('function_params', None),
            "function_execution_time": data.get('function_execution_time', None),
            "service": self._service_config.service_type,
            "processable_rows": 1,
            "success": data.get('success', False),
            "successful_rows": data.get('successful_rows', 0),
            "failed_rows": data.get('failed_rows', 0),
            "empty_rows": data.get('empty_rows', 0),
            "created_at": datetime.now().isoformat(),
            "provider": self._service_config.provider,
            "username": self._service_config.username,
            "organization": self._service_config.organization,
            "response": data.get('response', [])
        }

    def _dump_response_to_file(self, uuid, response_data, log_file):
        for r in response_data:
            r['uuid'] = uuid
            json.dump(r, log_file)
            log_file.write('\n')

    @property
    def service_config(self):
        return self._service_config

    @abc.abstractmethod
    def log(self, data):
        raise NotImplementedError('log method must be defined')


class MetricsGeocoderLogger(MetricsLogger):
    def __init__(self, service_config, logger):
        super(MetricsGeocoderLogger, self).__init__(service_config, logger)

    def log(self, data):
        dump_data = self.collect_data(data)
        self.dump_to_file(dump_data)

    def collect_data(self, data):
        dump_data = super(MetricsGeocoderLogger, self).collect_data(data)
        if data.get('success', False):
            cost = self.service_config.cost_per_hit
        else:
            cost = 0
        if self.service_config.is_high_resolution:
            kind = 'high-resolution'
        else:
            kind = 'internal'
        dump_data.update({
            "batched": False,
            "cache_hits": 0,  # Always 0 because no cache involved
            # https://github.com/CartoDB/cartodb/blob/master/app/models/geocoding.rb#L208-L211
            "cost": cost,
            "geocoder_type": self.service_config.service_type,
            "kind": kind,
            "processed_rows": data.get('successful_rows', 0),
            "real_rows": data.get('successful_rows', 0),
        })
        return dump_data


class MetricsGenericLogger(MetricsLogger):
    def __init__(self, service_config, logger):
        super(MetricsGenericLogger, self).__init__(service_config, logger)

    def log(self, data):
        dump_data = self.collect_data(data)
        self.dump_to_file(dump_data)

    def collect_data(self, data):
        return super(MetricsGenericLogger, self).collect_data(data)


class MetricsIsolinesLogger(MetricsLogger):
    def __init__(self, service_config, logger):
        super(MetricsIsolinesLogger, self).__init__(service_config, logger)

    def log(self, data):
        dump_data = self.collect_data(data)
        self.dump_to_file(dump_data)

    def collect_data(self, data):
        dump_data = super(MetricsIsolinesLogger, self).collect_data(data)
        dump_data.update({
            "isolines_generated": data.get('isolines_generated', 0)
        })
        return dump_data
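# A minimal usage sketch, assuming it runs inside a PL/Python function (plpy
# only exists there) and that `config` carries the attributes the loggers
# read (service_type, provider, username, organization, metrics_log_path);
# the function name below is a hypothetical example:
#
#     with metrics('my_geocoder_function', config, logger):
#         MetricsDataGatherer.add('success', True)
#         MetricsDataGatherer.add('successful_rows', 1)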
# NOTE: sets are unordered, so zip() over two sets would pair elements
# arbitrarily; lists keep the country/currency pairing deterministic.
countries = ['uk', 'usa']
curr = ['pound', 'dollar']
a = zip(countries, curr)

# Iterating the zip object here would exhaust it, leaving d empty below:
# for i in a:
#     print(i)

d = {}
for k, v in a:
    d[k] = v
print(d)
# Special logger that writes to sys.stdout using colors and saves to logfile
# Usage: logger = logging.getLogger(__name__)

import sys
import logging

import colorama


class BaseFormatter(logging.Formatter):
    def __init__(self, fmt=None, datefmt=None):
        FORMAT = '%(customlevelname)s%(message)s'
        super().__init__(fmt=FORMAT, datefmt=datefmt)

    def format(self, record):
        record.__dict__['customlevelname'] = self._get_levelname(record.levelname)
        if not hasattr(record, '_edited') and record.levelname != 'PRINT':
            record._edited = True
            # format multiline messages 'nicely' to make it clear they are together
            record.msg = str(record.msg).replace('\n', '\n | ')
            ##if not isinstance(record.args, Mapping):
            record.args = tuple(arg.replace('\n', '\n | ') if isinstance(arg, str) else arg
                                for arg in record.args)
        return super().format(record)

    def formatException(self, ei):
        """prefix traceback info for better representation"""
        s = super().formatException(ei)
        # fancy format traceback
        s = '\n'.join(' | ' + line for line in s.splitlines())
        # separate the traceback from the preceding lines
        s = ' |___\n{}'.format(s)
        return s

    def _get_levelname(self, name):
        """NOOP: overridden by subclasses"""
        return name


class TextFormatter(BaseFormatter):
    def _get_levelname(self, name):
        if name == 'INFO':
            return '-> '
        elif name == 'PRINT':
            return ''
        else:
            return name + ': '


class ColorFormatter(BaseFormatter):
    FORMATS = {'CRITICAL': colorama.Back.RED,
               'ERROR': colorama.Fore.LIGHTRED_EX,
               'WARNING': colorama.Fore.YELLOW,
               'PRINT': '',
               'INFO': colorama.Fore.CYAN,  # colorama.Fore.GREEN
               'DEBUG': colorama.Back.LIGHTWHITE_EX}

    def _get_levelname(self, name):
        if name == 'INFO':
            fmt = '{0}->{2} '
        elif name == 'PRINT':
            fmt = ''
        else:
            fmt = '{0}{1}{2}: '
        return fmt.format(self.FORMATS.get(name, ''), name, colorama.Style.RESET_ALL)


def init_logging(logfile=None, debug=False):
    """Customize log and send it to console and logfile"""
    logging.addLevelName(25, 'PRINT')
    if logging.getLoggerClass().__name__ == 'FatalLogger':
        # disable log deduplication by Pelican
        logging.getLoggerClass().limit_filter.LOGS_DEDUP_MIN_LEVEL = logging.INFO  # still ON for logger.info or less
    colorama.init()
    root_logger = logging.getLogger()
    root_logger.setLevel(25)  # don't log INFO or lower

    # console handler
    console_handler = logging.StreamHandler(stream=sys.stdout)
    console_handler.setFormatter(ColorFormatter())
    root_logger.addHandler(console_handler)

    # file handler
    if logfile:
        file_handler = logging.FileHandler(logfile, mode='a')
        file_handler.setFormatter(TextFormatter())
        file_handler.setLevel(logging.INFO)
        root_logger.addHandler(file_handler)
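# A small self-test, assuming this module is run directly; 'run.log' is a
# hypothetical output path.
if __name__ == '__main__':
    init_logging(logfile='run.log')
    demo_logger = logging.getLogger(__name__)
    demo_logger.warning('disk space low: %d%% used', 93)
    demo_logger.log(25, 'PRINT-level message\nspanning two lines')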
from unittest import TestCase

from nally.core.layers.inet.ip.ip_diff_service_values import IpDiffServiceValues
from nally.core.layers.inet.ip.ip_ecn_values import IpEcnValues
from nally.core.layers.inet.ip.ip_fragmentation_flags import IpFragmentationFlags
from nally.core.layers.inet.ip.ip_packet import IpPacket

#
# DSCP = 0
# total length = 25 bytes (20 + 5)
# identification = 39434
# flags = 0 (no flags set)
# ttl = 64
# protocol = Test (253; 0xfd in the dump)
# source IP = 192.168.1.8
# destination IP = 126.12.14.67
# payload = 5 * 0x58 bytes
#
PACKET_DUMP_1 = "450000199a0a000040fd91dec0a801087e0c0e435858585858"

#
# DSCP = 0xb8 (EF PHB + Non-ECN)
# total length = 20 bytes
# identification = 29320
# flags = 0
# ttl = 64
# protocol = TCP (6)
# source IP = 192.168.1.8
# destination IP = 8.8.8.8
#
PACKET_DUMP_2 = "45b8001472880000400635e4c0a8010808080808"

#
# DSCP = 0xbb (EF PHB + CE)
# total length = 20 bytes
# identification = 55463
# flags = 0
# ttl = 64
# protocol = TCP (6)
# source IP = 192.168.1.8
# destination IP = 8.8.8.8
#
PACKET_DUMP_3 = "45bb0014d8a700004006cfc1c0a8010808080808"

# test protocol type according to
# https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml
TEST_PROTO_TYPE = 253


class TestIpv4Packet(TestCase):

    def test_to_bytes(self):
        ip_packet_1 = IpPacket(
            source_addr_str="192.168.1.8",
            dest_addr_str="126.12.14.67",
            flags=IpFragmentationFlags(),
            identification=39434,
            protocol=TEST_PROTO_TYPE
        ) / bytes([0x58] * 5)
        hex_dump_1 = ip_packet_1.to_bytes().hex()
        self.assertEqual(PACKET_DUMP_1, hex_dump_1)
        self.assertEqual(ip_packet_1, IpPacket.from_bytes(ip_packet_1.to_bytes()))

        ip_packet_2 = IpPacket(
            source_addr_str="192.168.1.8",
            dest_addr_str="8.8.8.8",
            dscp=IpDiffServiceValues.EF,
            flags=IpFragmentationFlags(),
            identification=29320
        )
        hex_dump_2 = ip_packet_2.to_bytes().hex()
        self.assertEqual(PACKET_DUMP_2, hex_dump_2)
        self.assertEqual(ip_packet_2, IpPacket.from_bytes(ip_packet_2.to_bytes()))

        ip_packet_3 = IpPacket(
            source_addr_str="192.168.1.8",
            dest_addr_str="8.8.8.8",
            dscp=IpDiffServiceValues.EF,
            ecn=IpEcnValues.CE,
            flags=IpFragmentationFlags(),
            identification=55463
        )
        hex_dump_3 = ip_packet_3.to_bytes().hex()
        self.assertEqual(PACKET_DUMP_3, hex_dump_3)
        self.assertEqual(ip_packet_3, IpPacket.from_bytes(ip_packet_3.to_bytes()))

    def test_packet_creation_with_invalid_fields(self):
        # pass too long payload
        invalid_ip_packet = IpPacket(
            source_addr_str="10.10.128.44",
            dest_addr_str="216.58.209.14",
        ) / bytearray(65535)
        self.assertRaises(ValueError, invalid_ip_packet.to_bytes)

        # pass too long Identification field
        self.assertRaises(
            ValueError,
            IpPacket,
            source_addr_str="10.10.128.44",
            dest_addr_str="216.58.209.14",
            identification=pow(2, 16)
        )

        # pass too long Fragment Offset field
        self.assertRaises(
            ValueError,
            IpPacket,
            source_addr_str="10.10.128.44",
            dest_addr_str="216.58.209.14",
            fragment_offset=pow(2, 13)
        )
"""This module contains some general purpose utilities that are used across Diofant. """ from .iterables import (cantor_product, capture, default_sort_key, flatten, group, has_dups, has_variety, numbered_symbols, ordered, postfixes, postorder_traversal, prefixes, sift, subsets, topological_sort, unflatten, variations) from .lambdify import lambdify from .misc import filldedent __all__ = ('cantor_product', 'capture', 'default_sort_key', 'flatten', 'group', 'has_dups', 'has_variety', 'numbered_symbols', 'ordered', 'postfixes', 'postorder_traversal', 'prefixes', 'sift', 'subsets', 'topological_sort', 'unflatten', 'variations', 'lambdify', 'filldedent')
import ast, operator

from .. import value


def Name(node):
    return value.Symbol(node.id), []


def Attribute(node):
    # a.b.c
    names = []
    n = node
    # Trace back the chain of attributes.
    while isinstance(n, ast.Attribute):
        names.append(n.attr)
        n = n.value
    if isinstance(n, ast.Name):
        # It's a top-level name.
        names.append(n.id)
        symbol = '.'.join(reversed(names))
        return value.Symbol(symbol), []
    # It's a relative name - so just use getattr.
    return operator.attrgetter(node.attr), [node.value]
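# A brief illustration (names from this package assumed): for `a.b.c` the
# Attribute chain collapses into a single dotted symbol, while a base that
# is not a plain name (e.g. `f().x`) falls back to attrgetter:
#
#     node = ast.parse('a.b.c', mode='eval').body
#     Attribute(node)  # -> (value.Symbol('a.b.c'), [])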
# Definition for singly-linked list (assumed provided by the judge; included
# here so the snippet is self-contained).
class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next


class Solution:
    def insertionSortList(self, head: ListNode) -> ListNode:
        # Insertion sort on a linked list: detach nodes one by one from the
        # input and splice each into its sorted position after `dummy`.
        dummy = ListNode(0)
        curr = head
        while curr:
            # find the node after which curr should be inserted
            prev = dummy
            while prev.next and prev.next.val < curr.val:
                prev = prev.next
            nxt = curr.next
            curr.next = prev.next
            prev.next = curr
            curr = nxt
        return dummy.next
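# A small usage check; the list<->node helpers below are illustrative, not
# part of the original solution.
def _from_list(values):
    head = None
    for v in reversed(values):
        head = ListNode(v, head)
    return head


def _to_list(head):
    out = []
    while head:
        out.append(head.val)
        head = head.next
    return out


if __name__ == '__main__':
    result = Solution().insertionSortList(_from_list([4, 2, 1, 3]))
    assert _to_list(result) == [1, 2, 3, 4]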
# Stimulation signal class
import Ergocycle


class StimulationSignal:

    # Constructor
    def __init__(self, frequency, amplitude, pulse_width, training_time, muscle, electrode):
        self.frequency = frequency
        self.amplitude = amplitude
        self.pulse_width = pulse_width
        self.training_time = training_time
        self.muscle = muscle
        self.electrode = electrode

    def set_stimulation_signal(self, electrode):
        # `electrode` is assumed to be a 2D array with one column per
        # electrode (row 0 holding the frequencies).
        frequency = []
        amplitude = []
        pulse_width = []
        muscle = []
        for i in range(electrode.shape[1]):
            frequency.append(electrode[0, i])
            # TODO: check how the parameters are sent
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-29 06:32
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('tablemanager', '0033_auto_20160620_1121'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='publish',
            name='default_style',
        ),
        migrations.RemoveField(
            model_name='publish',
            name='kmi_abstract',
        ),
        migrations.RemoveField(
            model_name='publish',
            name='kmi_title',
        ),
    ]
import random import torch import torch.nn.functional as F import torch.optim as optim import numpy as np from networks.maddpg_critic_version_3 import MADDPGCriticVersion3 from networks.maddpg_actor_version_2 import MADDPGActorVersion2 from agents.base_agent import BaseAgent from agents.game import Game from utils.ounoise import OUNoise from utils.experience_pack import ExperienceUnpack import pdb device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') class ActionQuery(): """ Query result """ def __init__(self): self.next_actions = None """ MADDPGAgent (Version 4) 1. Add gradient clipping of gradient of Q-function 2. Reset OUNoise after every calling learn() 3. In forward_all, if agent_other is not self, detach tensor of agent_other.forward() 4. change input shape of 1st layer of critic network from (state_size) to (state_size+action_size) """ class MADDPGAgentVersion4(BaseAgent): def __init__(self, game, num_agents, state_size, action_size, name, random_seed=0, lr_critic=1e-3, lr_actor=1e-3, fc1_units=400, fc2_units=300, buffer_size=int(1e6), batch_size=128, gamma=0.99, tau=1e-3, max_norm=1.0, epsilon_start=1.0, epsilon_end=0.1, epsilon_decay=0.99, exploration_mu=0.0, exploration_theta=0.15, exploration_sigma=0.2): """Initialize an Agent object. Args: game (class Game): meidator in chain-of-responsibility design pattern. (Broker chain) random_seed (int): random seed. max_norm (float): value of clip_grad_norm for critic optimizer """ super().__init__() self.index_agent = None self.game = game self.num_agents = num_agents self.state_size = state_size self.action_size = action_size self.name = name self.seed = random.seed(random_seed) self.max_norm = max_norm self.epsilon = epsilon_start self.epsilon_end = epsilon_end self.epsilon_decay = epsilon_decay # Actor Network (w/ Target Network) self.actor_local = MADDPGActorVersion2(state_size, action_size, random_seed, fc1_units=fc1_units, fc2_units=fc2_units).to(device) self.actor_target = MADDPGActorVersion2(state_size, action_size, random_seed, fc1_units=fc1_units, fc2_units=fc2_units).to(device) self.actor_optimizer = optim.Adam(self.actor_local.parameters(), lr=lr_actor) # Critic Network (w/ Target Network) self.critic_local = MADDPGCriticVersion3(num_agents, state_size, action_size, fcs1_units=fc1_units, fc2_units=fc2_units, seed=random_seed).to(device) self.critic_target = MADDPGCriticVersion3(num_agents, state_size, action_size, fcs1_units=fc1_units, fc2_units=fc2_units, seed=random_seed).to(device) self.critic_optimizer = optim.Adam(self.critic_local.parameters(), lr=lr_critic) # Noise process for action # Noise process self.noise = OUNoise(self.action_size, exploration_mu, exploration_theta, exploration_sigma) # parameter of discounted reward self.gamma = gamma # soft update parameter self.tau = tau self.batch_size = batch_size def step(self, states, actions, rewards, next_states, dones): """ Args: states (numpy.array): states.shape[1] = (state_size*num_agents) actions (numpy.array): actions.shape[1] = (actions_size*num_agents) next_states (numpy.array): next_states.shape[1] = (state_size*num_agents) """ self.learn(states, actions, rewards, next_states, dones) def act(self, state, add_noise=True): """ Returns actions for given state. The input size of actor networks is state_size. 
""" state = torch.from_numpy(state).float().to(device) with torch.no_grad(): self.actor_local.eval() action = self.actor_local(state).cpu().data.numpy() self.actor_local.train() if add_noise: action += self.epsilon * self.noise.sample() return np.clip(action, -1, 1) def reset(self): self.noise.reset() def forward_all(self, next_states): """ Get next_actions. This is a chain-of-responsibility design pattern. (Broker chain) Return: 1d differentiable tensor of next_actions. """ q = ActionQuery() for i, agent in enumerate(self.game): # get next_state_i of agent_i n_state = next_states[:, i*self.state_size: (i+1)*self.state_size] # pdb.set_trace() if agent == self: detach = False else: detach = True # predict next_action and append it to actionQuery.actions agent.query(n_state, q, detach) return q.next_actions def query(self, next_state, q, detach): """ Args: q (class ActionQuery): parcel that stores actions """ next_action = self.actor_local(next_state) if detach is True: next_action = next_action.detach() if q.next_actions is None: q.next_actions = next_action else: q.next_actions = torch.cat((q.next_actions, next_action), dim=1) # pdb.set_trace() def learn(self, states, actions, rewards, next_states, dones): """Update policy and value parameters using given batch of experience tuples. For agent i: Q_target_i = r_i + gamma * critic_target(next_state, actor_target(next_state)) where: actor_target(state) -> actions for all agent critic_target(state, action) -> Q-value Args: experiences (Tuple[torch.Tensor]): tuple of (s, a, r, s', done) tuples gamma (float): discount factor """ # divide fields update agent number i experience_unpacks = ExperienceUnpack(states, actions, rewards, next_states, dones, self.state_size, self.action_size, self.num_agents) # upack field in agent_i if self.index_agent is None: self.index_agent = self.game.index_of_agent(self) # pdb.set_trace() states_i, actions_i, rewards_i, next_states_i, dones_i = experience_unpacks[self.index_agent] # assert (states_i.shape[1] == (self.state_size)), 'Wrong shape of states_i' # assert (actions_i.shape[1] == (self.action_size)), 'Wrong shape of actions_i' # assert (rewards_i.shape[1] == (1)), 'Wrong shape of rewards_i' # assert (dones_i.shape[1] == (1)), 'Wrong shape of dones_i' # train critic # loss fuction = Q_target(TD 1-step boostrapping) - Q_local(current) next_actions = self.forward_all(next_states) assert (next_actions.shape[1] == (self.action_size * self.num_agents)), 'Wrong shape of next_actions' Q_targets_next = self.critic_target(next_states, next_actions) Q_target_i = rewards_i + (self.gamma * Q_targets_next * (1-dones_i)) Q_expected = self.critic_local(states, actions) critic_loss = F.mse_loss(Q_expected, Q_target_i) self.critic_optimizer.zero_grad() critic_loss.backward() torch.nn.utils.clip_grad_norm_(self.critic_local.parameters(), self.max_norm) self.critic_optimizer.step() # train actor actions_pred = self.forward_all(states) actor_loss = - self.critic_local(states, actions).mean() self.actor_optimizer.zero_grad() actor_loss.backward() self.actor_optimizer.step() # update critic self.soft_update(self.critic_local, self.critic_target, self.tau) # update actors self.soft_update(self.actor_local, self.actor_target, self.tau) #------ update noise ---# self.epsilon = max(self.epsilon * self.epsilon_decay, self.epsilon_end) self.noise.reset() def soft_update(self, local_model, target_model, tau): """Soft update model parameters. 
θ_target = τ*θ_local + (1 - τ)*θ_target Args: local_model: PyTorch model (weights will be copied from) target_model: PyTorch model (weights will be copied to) tau (float): interpolation parameter """ for target_param, local_param in zip(target_model.parameters(), local_model.parameters()): target_param.data.copy_(tau * local_param.data + (1.0 - tau) * target_param.data) def model_dicts(self): m_dicts = {'critic_{}'.format(self.name): self.critic_target, 'actor_{}'.format(self.name): self.actor_target} return m_dicts
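

# --- Illustrative aside (not part of the original module) ---
# A standalone sanity check of the soft-update rule documented above,
#   theta_target = tau * theta_local + (1 - tau) * theta_target,
# run on two tiny layers. The layer sizes and tau value are arbitrary
# demo choices, not taken from the agent's configuration.
import torch
import torch.nn as nn

def _soft_update_demo(tau=0.1):
    local, target = nn.Linear(2, 2), nn.Linear(2, 2)
    # compute the expected blended parameters before the in-place update
    expected = [tau * l.data + (1.0 - tau) * t.data
                for l, t in zip(local.parameters(), target.parameters())]
    for t_param, l_param in zip(target.parameters(), local.parameters()):
        t_param.data.copy_(tau * l_param.data + (1.0 - tau) * t_param.data)
    for t_param, exp in zip(target.parameters(), expected):
        assert torch.allclose(t_param.data, exp)

if __name__ == '__main__':
    _soft_update_demo()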
a = int(input()) b = int(input()) c = int(input()) d = int(input()) for first_row_first_num in range(a, b + 1): for first_row_second_num in range(a, b + 1): for second_row_first_num in range(c, d + 1): for second_row_second_num in range(c, d + 1): if (first_row_first_num + second_row_second_num) == (first_row_second_num + second_row_first_num) \ and first_row_first_num != first_row_second_num \ and second_row_first_num != second_row_second_num: print(f"{first_row_first_num}{first_row_second_num}") print(f"{second_row_first_num}{second_row_second_num}\n")
# Generated by Django 2.1.5 on 2019-02-03 15:37 import backend.company.models from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('company', '0002_auto_20190130_1235'), ] operations = [ migrations.AddField( model_name='company', name='small_logo', field=models.ImageField(null=True, upload_to=backend.company.models.generate_small_logo_upload_location), ), migrations.AlterField( model_name='company', name='logo', field=models.ImageField(null=True, upload_to=backend.company.models.generate_logo_upload_location), ), ]
# Author: Pieter de Jong
import random


def generateComplexNumber():
    random.seed()
    real = random.randint(-100, 100)
    compl = random.randint(-100, 100)
    c = complex(real, compl)
    return c


def isMandelbrot(c):
    z = 0
    for i in range(100):
        z = pow(z, 2) + c
        absZ = abs(z)
        if absZ > 2:
            return False
    print("Is Mandelbrot!")
    return True


# z = generateComplexNumber()
# print(z)
# isMandelbrot(z)

# z = generateComplexNumber()
# print(z)
# isMandelbrot(z)

print(isMandelbrot(complex(-1, 0)))
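

# --- Illustrative aside (not part of the original script) ---
# The per-point test above extends naturally to a whole grid; a minimal NumPy
# escape-time sketch over the complex plane. The resolution, bounds, and
# iteration count are arbitrary choices here, not taken from the original.
import numpy as np

def mandelbrot_grid(width=80, height=40, max_iter=50):
    xs = np.linspace(-2.0, 1.0, width)
    ys = np.linspace(-1.5, 1.5, height)
    c = xs[np.newaxis, :] + 1j * ys[:, np.newaxis]
    z = np.zeros_like(c)
    inside = np.ones(c.shape, dtype=bool)
    for _ in range(max_iter):
        # only iterate points that have not escaped yet
        z[inside] = z[inside] ** 2 + c[inside]
        inside &= np.abs(z) <= 2
    return inside

if __name__ == '__main__':
    for row in mandelbrot_grid():
        print("".join("*" if cell else " " for cell in row))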
import pytest from pyspark.sql import SparkSession @pytest.fixture(scope="session") def spark_session(): spark = SparkSession.builder\ .appName('testing')\ .config('spark.driver.bindAddress', '127.0.0.1')\ .getOrCreate() yield spark spark.stop() @pytest.fixture(scope="module") def data_frame(spark_session): return spark_session.read.parquet('data/catalog.parquet')
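

# --- Illustrative aside (not part of the original fixtures file) ---
# Example tests consuming the fixtures above. The schema of
# data/catalog.parquet is not shown in this repo, so the assertions are
# deliberately schema-agnostic smoke checks.
def test_catalog_loads(data_frame):
    assert data_frame.count() >= 0
    assert len(data_frame.columns) > 0


def test_spark_session_alive(spark_session):
    df = spark_session.createDataFrame([(1, "a")], ["id", "label"])
    assert df.count() == 1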
def doTest(host, port):
    from tensorflow_serving.apis.predict_pb2 import PredictRequest
    from tensorflow_serving.apis.prediction_service_pb2_grpc import PredictionServiceStub
    from grpc import insecure_channel, StatusCode
    from tensorflow.contrib.util import make_tensor_proto, make_ndarray
    from tensorflow import float32

    target = "%s:%s" % (host, port)
    print("Sending prediction request to", target, "\n")

    channel = insecure_channel(target)
    stub = PredictionServiceStub(channel)

    request = PredictRequest()
    request.model_spec.name = "campaign"
    request.model_spec.signature_name = ""

    request.inputs["hour"].CopyFrom(make_tensor_proto(6, shape=[1], dtype=float32))
    request.inputs["week"].CopyFrom(make_tensor_proto(5, shape=[1], dtype=float32))
    request.inputs["sid"].CopyFrom(make_tensor_proto("47320", shape=[1]))
    request.inputs["sspid"].CopyFrom(make_tensor_proto("3", shape=[1]))
    request.inputs["country"].CopyFrom(make_tensor_proto("DK", shape=[1]))
    request.inputs["os"].CopyFrom(make_tensor_proto("6", shape=[1]))
    request.inputs["domain"].CopyFrom(make_tensor_proto("video9.in", shape=[1]))
    request.inputs["isp"].CopyFrom(make_tensor_proto("Tele Danmark", shape=[1]))
    request.inputs["browser"].CopyFrom(make_tensor_proto("4", shape=[1]))
    request.inputs["type"].CopyFrom(make_tensor_proto("site", shape=[1]))
    request.inputs["lat"].CopyFrom(make_tensor_proto(35000, shape=[1], dtype=float32))
    request.inputs["lon"].CopyFrom(make_tensor_proto(105000, shape=[1], dtype=float32))
    request.inputs["connectiontype"].CopyFrom(make_tensor_proto("2", shape=[1]))
    request.inputs["devicetype"].CopyFrom(make_tensor_proto("1", shape=[1]))
    request.inputs["donottrack"].CopyFrom(make_tensor_proto("0", shape=[1]))
    request.inputs["userid"].CopyFrom(make_tensor_proto("984273063", shape=[1]))
    request.inputs["ua"].CopyFrom(make_tensor_proto("Mozilla/5.0 (Linux; U; Android 5.1.1; en-US; Redmi Note 3 Build/LMY47V) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 UCBrowser/11.0.8.855 U3/0.8.0 Mobile Safari/534.30", shape=[1]))

    (result, status) = stub.Predict.with_call(request)
    if status.code() != StatusCode.OK:
        print("call failed", status)
        return

    predictions = make_ndarray(result.outputs["classes"])
    if predictions.size == 0:
        print("no prediction replied")
        return

    cidIndex = predictions[0]
    print("Server predicted index", cidIndex)


if __name__ == "__main__":
    from sys import argv, exit
    if len(argv) != 3:
        print("Usage: python test.py [host] [port]\n")
        exit(0)
    doTest(argv[1], argv[2])
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle as shuffle_data


class BaseDataLoader():
    def __init__(self, data_handler, shuffle, test_split, random_state, stratify, training):
        dh = data_handler
        if dh.X_data_test is dh.y_data_test is None:
            if 0 < test_split < 1:
                stratify = dh.y_data if stratify else None
                X_train, X_test, y_train, y_test = train_test_split(dh.X_data, dh.y_data,
                                                                    test_size=test_split,
                                                                    random_state=random_state,
                                                                    shuffle=shuffle,
                                                                    stratify=stratify)
                self.X_out, self.y_out = (X_train, y_train) if training else (X_test, y_test)
                print("Training and test sets created according to the defined test_split percentage.")
            else:
                self.X_out, self.y_out = dh.X_data, dh.y_data
                if shuffle:
                    self.X_out, self.y_out = shuffle_data(self.X_out, self.y_out, random_state=random_state)
                print("The whole dataset is used for training.")
        elif dh.X_data_test is not None and dh.y_data_test is not None:
            self.X_out, self.y_out = (dh.X_data, dh.y_data) if training \
                else (dh.X_data_test, dh.y_data_test)
            if shuffle:
                self.X_out, self.y_out = shuffle_data(self.X_out, self.y_out, random_state=random_state)
            print("Separate datasets configured in data_handler will be used for training and testing.")
        else:
            raise ValueError('data_handler not configured properly.')

    def get_data(self):
        print(f"Number of loaded data instances: {len(self.X_out)}")
        return self.X_out, self.y_out
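

# --- Illustrative usage (not part of the original module) ---
# The real data_handler class is not included in this repo; the stub below
# only mimics the four attributes BaseDataLoader reads, with tiny toy data.
if __name__ == '__main__':
    class _DemoHandler:
        X_data = [[i] for i in range(10)]
        y_data = [i % 2 for i in range(10)]
        X_data_test = None
        y_data_test = None

    loader = BaseDataLoader(_DemoHandler(), shuffle=True, test_split=0.3,
                            random_state=0, stratify=False, training=True)
    X_train, y_train = loader.get_data()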
class TargetNotGeneratedErr(Exception): pass class CompilationFailedErr(Exception): pass class CmdFailedErr(Exception): pass class NotFileErr(Exception): pass class DepIsGenerated(Exception): pass class LineParseErr(Exception): pass class CleanExitErr(Exception): pass
SIZE = 400 GRID_LEN = 4 GRID_PADDING = 6 FONT = ("Verdana", 40, "bold") BACKGROUND_COLOR_GAME = "#57407C" CELL_COLOR_EMPTY = "#3D2963" CELL_COLOR_CORRECT = "#E88A45" CELL_COLOR_INCORRECT = "#6AC6B8" TEXT_COLOR = "white"
from django.conf.urls import patterns, include, url urlpatterns = patterns('', url(r'join/$','api.views.new_user'), #Creates and logs in a user url(r'login/$','api.views.login_empous_user'), #logs in an empous user url(r'generatetoken/$', 'api.views.generate_token'), url(r'resetpassword/$', 'api.views.reset_password'), url(r'invite/$','api.views.invite_by_username_or_email'), url(r'check/$','api.views.check_username_free'), url(r'friends/$','api.views.get_friends_for_user'),#Returns all the empous users a player has played with or are FB friends with url(r'create/$','api.views.create_game'), url(r'update/$','api.views.update_game'), url(r'complete/$','api.views.completed_game'), url(r'gamelist/$','api.views.game_list'), url(r'numplayablegames/$','api.views.number_playable_games'), url(r'completelist/$','api.views.last_5_completed_games'), url(r'cancreategame/$','api.views.can_create_games'), url(r'changematchmaking/$','api.views.change_matchmaking'), url(r'randomplayer/$','api.views.random_empous_player') )
# Assumed imports: this file is an excerpt and did not carry its own header.
# chainer / F / L / Variable are inferred from usage; `xp` is presumably
# numpy (or cupy when running on GPU) in the original code.
import chainer
import chainer.functions as F
import chainer.links as L
import numpy as xp  # assumption: cupy in the original when on GPU
from chainer import variable, Variable


# this is used for regularization
class Intersection2(chainer.Link):
    def __init__(self, outdim, numnet):
        super(Intersection2, self).__init__()
        self.outdim = outdim
        self.numnet = numnet
        with self.init_scope():
            W = chainer.initializers.One()
            self.W = variable.Parameter(W)
            self.W.initialize((self.numnet, 1))

    def __call__(self, x):
        if self.outdim == 1:
            weight = F.relu(self.W.T)
        else:
            weight = F.relu(self.make_weight(self.W))
        return F.matmul(weight, x)

    def make_weight(self, array):
        weight_matrix = xp.zeros((self.outdim, self.outdim * self.numnet), dtype=xp.float32)
        for i in range(self.numnet):
            q = xp.array(array[i, 0].data, dtype=xp.float32)
            weight_matrix[:, i * self.outdim:(i + 1) * self.outdim] = xp.identity(self.outdim, dtype=xp.float32) * q
        return Variable(weight_matrix)


class Generator(chainer.Chain):
    def __init__(self, dim=784, num_nets=784, latent=100, wscale=0.02):
        super(Generator, self).__init__()
        self.dim = dim
        self.num_nets = num_nets
        self.wscale = wscale
        self.n_hidden = latent
        with self.init_scope():
            self.inter = Intersection2(self.dim, self.num_nets)
            for net in range(self.num_nets):
                w = chainer.initializers.Normal(self.wscale)
                b = chainer.initializers.Normal(self.wscale)
                setattr(self, "l1_{}".format(net), L.Linear(None, 100, initialW=w, initial_bias=b))
                setattr(self, "l2_{}".format(net), L.Linear(None, 100, initialW=w, initial_bias=b))
                setattr(self, "l3_{}".format(net), L.Linear(None, 28 * 28, initialW=w, initial_bias=b))
                # set batchnormalization
                setattr(self, "bn1_{}".format(net), L.BatchNormalization(size=100))
                setattr(self, "bn2_{}".format(net), L.BatchNormalization(size=100))

    def make_hidden(self, batchsize):
        return xp.random.normal(0, 1, (batchsize, self.n_hidden, 1, 1)).astype(xp.float32)

    def __call__(self, z, test=False):
        for net in range(self.num_nets):
            h = F.relu(getattr(self, 'bn1_{}'.format(net))(getattr(self, 'l1_{}'.format(net))(z)))
            # h = F.relu(getattr(self, 'bn1_{}'.format(net))(getattr(self, 'l1_{}'.format(net))(z)))
            # h2 = F.relu(getattr(self, 'l2_{}'.format(net))(h))
            h = F.relu(getattr(self, 'bn2_{}'.format(net))(getattr(self, 'l2_{}'.format(net))(h)))
            h2 = F.sigmoid(getattr(self, 'l3_{}'.format(net))(h))
            if net == 0:
                X = h2
            else:
                X = F.concat((X, h2), axis=1)
        batchsize = X.shape[0]
        X = X.reshape(batchsize, self.num_nets * self.dim)
        # x = self.inter(X.T).T.data
        x = self.inter(X.T).T
        # x = Variable(xp.reshape(x, (batchsize, 1, 28, 28)))
        x = F.reshape(x, (-1, 1, 28, 28))
        return x


class Critic(chainer.Chain):
    def __init__(self, num_nets=784, wscale=0.02):
        super(Critic, self).__init__()
        self.num_nets = num_nets
        self.wscale = wscale
        with self.init_scope():
            self.inter = Intersection2(1, self.num_nets)
            for net in range(self.num_nets):
                w = chainer.initializers.Normal(self.wscale)
                b = chainer.initializers.Normal(self.wscale)
                setattr(self, "l1_{}".format(net), L.Linear(None, 100, initialW=w, initial_bias=b))
                setattr(self, "l2_{}".format(net), L.Linear(None, 100, initialW=w, initial_bias=b))
                # setattr(self, "l3_{}".format(net), L.Linear(None, 800, initialW = w, initial_bias = b))
                setattr(self, "l4_{}".format(net), L.Linear(None, 1, initialW=w, initial_bias=b))
                # set batchnormalization
                # setattr(self, "bn1_{}".format(net), L.BatchNormalization(size=800))
                # setattr(self, "bn2_{}".format(net), L.BatchNormalization(size=800))
                # setattr(self, "bn3_{}".format(net), L.BatchNormalization(size=800))
            # self.bn = L.BatchNormalization(size=2)

    def __call__(self, x, test=False):
        x = x.reshape(100, 784)
        for net in range(self.num_nets):
            # h becomes nan here, which turns everything into nan (x itself
            # is definitely not nan) -- and that is because W already became
            # nan in the preceding update
            # h = F.leaky_relu(getattr(self, 'bn1_{}'.format(net))(getattr(self, 'l1_{}'.format(net))(x)))
            h = F.leaky_relu(getattr(self, 'l1_{}'.format(net))(x))
            # h = F.leaky_relu(getattr(self, 'bn2_{}'.format(net))(getattr(self, 'l2_{}'.format(net))(h)))
            h = F.leaky_relu(getattr(self, 'l2_{}'.format(net))(h))
            # h = F.leaky_relu(getattr(self, 'bn3_{}'.format(net))(getattr(self, 'l3_{}'.format(net))(h)))
            # h = F.leaky_relu(getattr(self, 'l3_{}'.format(net))(h))
            h2 = getattr(self, 'l4_{}'.format(net))(h)
            if net == 0:
                # Y = h2.reshape(64, 1)
                Y = h2
            else:
                # Y = F.concat((Y, h2.reshape(64, 1)), axis = 1)
                Y = F.concat((Y, h2), axis=1)
        y = self.inter(Y.T).T
        # y = self.inter(self.bn(Y).T).T
        return y
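

# --- Illustrative aside (not part of the original model code) ---
# The comment in Critic.__call__ notes that W turns into NaN after an update.
# One common mitigation is to clip gradients on the critic's optimizer before
# each update; a minimal sketch using Chainer's built-in hook (the optimizer
# choice, learning rate, and clipping threshold here are assumptions, not
# values from the original training script):
if __name__ == '__main__':
    critic = Critic(num_nets=4)  # small net count just for the demo
    optimizer = chainer.optimizers.RMSprop(lr=5e-5)
    optimizer.setup(critic)
    optimizer.add_hook(chainer.optimizer.GradientClipping(1.0))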
import unittest from mcalc import get_prot_mass class MolecularMassTestCase(unittest.TestCase): def test_lower(self): result_lower = get_prot_mass('mpfmvnniyvsfceikeivcaggsttkyadvlqenneqgrtvklq') self.assertEqual(result_lower, 5051.7509) def test_gaps(self): result_gaps = get_prot_mass('MPFMVNNIYVSF CEIKEIV CAGGSTTKYADVLQEN NEQGRTVKLQ') self.assertEqual(result_gaps, 5051.7509) def test_numbers(self): # check that the function fails when fed values other than strings with self.assertRaises(AttributeError): get_prot_mass(123) def test_amino_acids(self): # check that the function fails when fed values other than strings containing real amino acid symbols with self.assertRaises(ValueError): get_prot_mass('MPFMVNNIYVSF528ZZZ') if __name__ == '__main__': unittest.main()
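

# --- Illustrative aside (not part of the original tests) ---
# The mcalc module under test is not included in this repo. Below is a
# minimal sketch of a compatible get_prot_mass, showing the behaviour the
# tests pin down: spaces are ignored, case is normalised, unknown symbols
# raise ValueError, and non-strings raise AttributeError (via .replace).
# The residue masses are standard monoisotopic values, but they are an
# assumption about mcalc's table, so the exact totals asserted above are
# not guaranteed to match this sketch.
_MONO_MASSES = {'G': 57.02146, 'A': 71.03711, 'S': 87.03203, 'P': 97.05276,
                'V': 99.06841, 'T': 101.04768, 'C': 103.00919, 'L': 113.08406,
                'I': 113.08406, 'N': 114.04293, 'D': 115.02694, 'Q': 128.05858,
                'K': 128.09496, 'E': 129.04259, 'M': 131.04049, 'H': 137.05891,
                'F': 147.06841, 'R': 156.10111, 'Y': 163.06333, 'W': 186.07931}


def get_prot_mass_sketch(sequence):
    sequence = sequence.replace(' ', '').upper()  # ints raise AttributeError here
    try:
        residue_sum = sum(_MONO_MASSES[aa] for aa in sequence)
    except KeyError as err:
        raise ValueError(f'unknown amino acid symbol: {err}')
    return round(residue_sum + 18.01056, 4)  # add one water for the termini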
import os
import sys
from flask import Flask, request, abort, jsonify, render_template, url_for
from flask_cors import CORS
import traceback

from models import setup_db, SampleLocation, db_drop_and_create_all

def create_app(test_config=None):
    # create and configure the app
    app = Flask(__name__)
    setup_db(app)
    CORS(app)

    """ uncomment at the first time running the app """
    db_drop_and_create_all()

    @app.route('/', methods=['GET'])
    def home():
        return render_template(
            'map.html',
            map_key=os.getenv('GOOGLE_MAPS_API_KEY', 'GOOGLE_MAPS_API_KEY_WAS_NOT_SET?!')
        )

    @app.route("/api/store_item")
    def store_item():
        try:
            latitude = float(request.args.get('lat'))
            longitude = float(request.args.get('lng'))
            description = request.args.get('description')

            location = SampleLocation(
                description=description,
                geom=SampleLocation.point_representation(latitude=latitude, longitude=longitude)
            )
            location.insert()

            return jsonify(
                {
                    "success": True,
                    "location": location.to_dict()
                }
            ), 200
        except:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            app.logger.error(traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2))
            abort(500)

    @app.route("/api/get_items_in_radius")
    def get_items_in_radius():
        try:
            latitude = float(request.args.get('lat'))
            longitude = float(request.args.get('lng'))
            radius = int(request.args.get('radius'))

            locations = SampleLocation.get_items_within_radius(latitude, longitude, radius)
            return jsonify(
                {
                    "success": True,
                    "results": locations
                }
            ), 200
        except:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            app.logger.error(traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2))
            abort(500)

    @app.errorhandler(500)
    def server_error(error):
        return jsonify({
            "success": False,
            "error": 500,
            "message": "server error"
        }), 500

    return app

app = create_app()

if __name__ == '__main__':
    port = int(os.environ.get("PORT", 5000))
    app.run(host='127.0.0.1', port=port, debug=True)
import os import sqlite3 import textwrap from scripts.artifact_report import ArtifactHtmlReport from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly def get_protonmailMessages(files_found, report_folder, seeker, wrap_text): for file_found in files_found: file_found = str(file_found) if not file_found.endswith('MessagesDatabase.db'): continue # Skip all other files db = open_sqlite_db_readonly(file_found) cursor = db.cursor() cursor.execute(''' SELECT datetime(messagev3.Time,'unixepoch') AS 'Message Timestamp', messagev3.Subject AS 'Subject', messagev3.Sender_SenderSerialized AS 'Sender', CASE messagev3.Type WHEN 0 THEN 'Incoming' WHEN 2 THEN 'Outgoing' END AS 'Message Direction', CASE messagev3.Unread WHEN 0 THEN 'Read' WHEN 1 THEN 'Unread' END AS 'Status', messagev3.Size AS 'Message Size', CASE messagev3.AccessTime WHEN 0 THEN '' ELSE datetime(messagev3.AccessTime/1000,'unixepoch') END AS 'Accessed Timestamp', CASE messagev3.Location WHEN 0 THEN 'Inbox' WHEN 7 THEN 'Sent' END AS 'Folder', attachmentv3.file_name AS 'Attachment Name', attachmentv3.file_size AS 'Attachment Size', messagev3.ToList AS 'To List', messagev3.ReplyTos AS 'Reply To', messagev3.CCList AS 'CC List', messagev3.BCCList AS 'BCC List', messagev3.Header AS 'Message Header' FROM messagev3 LEFT JOIN attachmentv3 ON attachmentv3.message_id = messagev3.ID ORDER BY messagev3.Time ASC ''') all_rows = cursor.fetchall() usageentries = len(all_rows) if usageentries > 0: report = ArtifactHtmlReport('ProtonMail - Messages') report.start_artifact_report(report_folder, 'ProtonMail - Messages') report.add_script() data_headers = ('Message Timestamp','Subject','Sender','Message Direction','Status','Message Size','Accessed Timestamp','Folder','Attachment Name','Attachment Size','Message Header') data_list = [] for row in all_rows: data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[14])) report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() tsvname = f'ProtonMail - Messages' tsv(report_folder, data_headers, data_list, tsvname) tlactivity = f'ProtonMail - Messages' timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc('No ProtonMail - Messages data available') db.close()
import importlib def load(module): wrapper_module = importlib.import_module( "tacorn." + module + "_wrapper") return wrapper_module
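

# --- Illustrative usage (not part of the original module) ---
# load("foo") imports and returns the module tacorn.foo_wrapper. The module
# name below is hypothetical and only shows the naming convention:
#
#     wavernn = load("wavernn")   # imports tacorn.wavernn_wrapper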
import agate


def get_buffalo_data(data):
    buffalo_data = data.where(lambda row: row['dest_city'] == 'Buffalo')
    buffalo_data.to_csv('output/dest-buffalo-2002-2015.csv')


def get_full_data():
    fulldata = agate.Table.from_csv('original/WRAPS-arrivals-by-destination-2002-2015-clean.csv')
    return fulldata


def data_by_year(data):
    by_origin = data.group_by('origin')
    column_names = ['origin','2002','2003','2004','2005','2006','2007','2008','2009','2010','2011','2012','2013','2014','2015']
    rows = []
    for i, origin in enumerate(by_origin):
        # Order by year for sanity check
        ordered = origin.order_by('year')
        # Not every year is present for every country, so default each year's
        # count to zero and then fill in the values that do exist.
        origin_row = [ordered.rows[0]['origin'],0,0,0,0,0,0,0,0,0,0,0,0,0,0]
        total_sum = 0
        for row in ordered.rows:
            # Create list of values by year
            year_index = column_names.index(str(row['year']))
            origin_row[year_index] = row['arrivals']
            total_sum += row['arrivals']
        origin_row.append(total_sum)
        rows.append(origin_row)

    # Add a 'total' column header
    column_names.append('total')
    origin_by_year = agate.Table(rows, column_names)
    origin_by_year.to_csv('output/new-origin_by_year-buffalo-2002-2015.csv')


def init():
    # fulldata = get_full_data()
    # get_buffalo_data(fulldata)
    buffalo_data = agate.Table.from_csv('original/new-buffalo-2002-2015-clean.csv')
    data_by_year(buffalo_data)


init()
from django.urls import include, path from rest_framework.routers import DefaultRouter from job_position.api.views import JobPositionViewSet router = DefaultRouter() router.register('', JobPositionViewSet, basename='jobposition') urlpatterns = [ path('jobpositions/', include(router.urls), name='jobpositions') ]
# -*- coding: utf-8 -*- """ Tencent is pleased to support the open source community by making BK-LOG 蓝鲸日志平台 available. Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. BK-LOG 蓝鲸日志平台 is licensed under the MIT License. License for BK-LOG 蓝鲸日志平台: -------------------------------------------------------------------- Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import copy from unittest.mock import patch from django.test import TestCase, override_settings from apps.log_databus.exceptions import CollectorConfigNotExistException from apps.log_databus.handlers.collector import CollectorHandler from apps.log_databus.constants import LogPluginInfo from apps.exceptions import ApiRequestError, ApiResultError from .test_collectorhandler import TestCollectorHandler from ...log_databus.serializers import CollectorCreateSerializer from ...utils.drf import custom_params_valid BK_DATA_ID = 1 TABLE_ID = "2_log.test_table" SUBSCRIPTION_ID = 2 TASK_ID = 3 NEW_TASK_ID = 4 LAST_TASK_ID = 5 PARAMS = { "bk_biz_id": 706, "collector_config_name": "采集项名称", "collector_scenario_id": "row", "category_id": "application", "target_object_type": "HOST", "target_node_type": "TOPO", "target_nodes": [ {"bk_inst_id": 33, "bk_obj_id": "module"}, ], "data_encoding": "UTF-8", "bk_data_name": "abc", "description": "这是一个描述", "params": { "paths": ["/log/abc"], "conditions": { "type": "match", "match_type": "include", "match_content": "delete", "separator": "|", "separator_filters": [ {"fieldindex": 1, "word": "val1", "op": "=", "logic_op": "or"}, {"fieldindex": 2, "word": "val2", "op": "=", "logic_op": "or"}, ], }, "tail_files": True, "ignore_older": 1, "max_bytes": 1, }, "storage_cluster_id": "default", "storage_expires": 1, } DELETE_MSG = {"result": True} PART_FAILED_INSTANCE_DATA = { "instances": [ { "status": "FAILED", "host_statuses": [ {"status": "UNKNOWN", "version": "3.0.10", "name": "unifytlogc"}, {"status": "UNKNOWN", "version": "3.0.10", "name": "unifytlogc"}, ], "running_task": None, "instance_id": "host|instance|host|127.0.0.1-0-0", "create_time": "2019-09-19T20:32:19.957883", "instance_info": { "host": { "bk_host_name": "rbtnode1", "bk_supplier_account": "0", "bk_cloud_id": [ { "bk_obj_name": "", "id": "0", "bk_obj_id": "plat", "bk_obj_icon": "", "bk_inst_id": 0, "bk_inst_name": "default area", } ], "bk_host_innerip": "127.0.0.1", }, "service": {}, }, }, { "status": "SUCCESS", "host_statuses": [ {"status": "RUNNING", "version": "3.0.10", "name": "unifytlogc"}, {"status": "RUNNING", "version": "3.0.10", "name": "unifytlogc"}, ], 
"running_task": None, "instance_id": "host|instance|host|127.0.0.1-0-0", "create_time": "2019-09-19T20:32:19.957883", "instance_info": { "host": { "bk_host_name": "rbtnode1", "bk_supplier_account": "0", "bk_cloud_id": [ { "bk_obj_name": "", "id": "0", "bk_obj_id": "plat", "bk_obj_icon": "", "bk_inst_id": 0, "bk_inst_name": "default area", } ], "bk_host_innerip": "127.0.0.1", }, "service": {}, }, }, ], "subscription_id": SUBSCRIPTION_ID, } FAILED_INSTANCE_DATA = { "instances": [ { "status": "FAILED", "host_statuses": [ {"status": "UNKNOWN", "version": "3.0.10", "name": "unifytlogc"}, {"status": "UNKNOWN", "version": "3.0.10", "name": "unifytlogc"}, ], "running_task": None, "instance_id": "host|instance|host|127.0.0.1-0-0", "create_time": "2019-09-19T20:32:19.957883", "instance_info": { "host": { "bk_host_name": "rbtnode1", "bk_supplier_account": "0", "bk_cloud_id": [ { "bk_obj_name": "", "id": "0", "bk_obj_id": "plat", "bk_obj_icon": "", "bk_inst_id": 0, "bk_inst_name": "default area", } ], "bk_host_innerip": "127.0.0.1", }, "service": {}, }, } ], "subscription_id": SUBSCRIPTION_ID, } SUCCESS_INSTANCE_DATA = { "instances": [ { "status": "SUCCESS", "host_statuses": [ {"status": "RUNNING", "version": "3.0.10", "name": "unifytlogc"}, {"status": "RUNNING", "version": "3.0.10", "name": "unifytlogc"}, ], "running_task": None, "instance_id": "host|instance|host|127.0.0.1-0-0", "create_time": "2019-09-19T20:32:19.957883", "instance_info": { "host": { "bk_host_name": "rbtnode1", "bk_supplier_account": "0", "bk_cloud_id": [ { "bk_obj_name": "", "id": "0", "bk_obj_id": "plat", "bk_obj_icon": "", "bk_inst_id": 0, "bk_inst_name": "default area", } ], "bk_host_innerip": "127.0.0.1", }, "service": {}, }, } ], "subscription_id": SUBSCRIPTION_ID, } RUNNING_INSTANCE_DATA = { "instances": [ { "status": "RUNNING", "host_statuses": [ {"status": "PENDING", "version": "3.0.10", "name": "unifytlogc"}, {"status": "PENDING", "version": "3.0.10", "name": "unifytlogc"}, ], "running_task": None, "instance_id": "host|instance|host|127.0.0.1-0-0", "create_time": "2019-09-19T20:32:19.957883", "instance_info": { "host": { "bk_host_name": "rbtnode1", "bk_supplier_account": "0", "bk_cloud_id": [ { "bk_obj_name": "", "id": "0", "bk_obj_id": "plat", "bk_obj_icon": "", "bk_inst_id": 0, "bk_inst_name": "default area", } ], "bk_host_innerip": "127.0.0.1", }, "service": {}, }, } ], "subscription_id": SUBSCRIPTION_ID, } INSTANCE_DATA_RETURN = { "FAILED": [ { "status": "FAILED", "ip": "127.0.0.1", "bk_cloud_id": 0, "instance_id": "host|instance|host|127.0.0.1-0-0", "instance_name": "127.0.0.1", "plugin_version": "3.0.10", "bk_supplier_id": "0", "create_time": "2019-09-19T20:32:19.957883", } ], "SUCCESS": [ { "status": "SUCCESS", "ip": "127.0.0.1", "bk_cloud_id": 0, "instance_id": "host|instance|host|127.0.0.1-0-0", "instance_name": "127.0.0.1", "plugin_version": "3.0.10", "bk_supplier_id": "0", "create_time": "2019-09-19T20:32:19.957883", } ], } STATUS_DATA = [PART_FAILED_INSTANCE_DATA] STATUS_DATA_RETURN = [ { "status": "FAILED", "status_name": "失败", "ip": "127.0.0.1", "bk_cloud_id": 0, "instance_id": "host|instance|host|127.0.0.1-0-0", "instance_name": "127.0.0.1", "plugin_name": "unifytlogc", "plugin_version": "3.0.10", "bk_supplier_id": "0", "create_time": "2019-09-19T20:32:19.957883", }, { "status": "SUCCESS", "status_name": "正常", "ip": "127.0.0.1", "bk_cloud_id": 0, "instance_id": "host|instance|host|127.0.0.1-0-0", "instance_name": "127.0.0.1", "plugin_name": "unifytlogc", "plugin_version": "3.0.10", "bk_supplier_id": "0", 
"create_time": "2019-09-19T20:32:19.957883", }, ] TOPO_TREE = [ { "host_count": 0, "default": 0, "bk_obj_name": "业务", "bk_obj_id": "biz", "service_instance_count": 0, "child": [ { "host_count": 0, "default": 0, "bk_obj_name": "test", "bk_obj_id": "test", "service_instance_count": 0, "child": [], "service_template_id": 0, "bk_inst_id": 4, "bk_inst_name": "test", } ], "service_template_id": 0, "bk_inst_id": 4, "bk_inst_name": "日志平台-测试1", } ] TOPO_TREE_RETURN = { "biz|4": { "host_count": 0, "default": 0, "bk_obj_name": "业务", "bk_obj_id": "biz", "service_instance_count": 0, "child": [ { "host_count": 0, "default": 0, "bk_obj_name": "test", "bk_obj_id": "test", "service_instance_count": 0, "child": [], "service_template_id": 0, "bk_inst_id": 4, "bk_inst_name": "test", "node_link": ["biz|4", "test|4"], } ], "service_template_id": 0, "bk_inst_id": 4, "bk_inst_name": "日志平台-测试1", "node_link": ["biz|4"], }, "test|4": { "host_count": 0, "default": 0, "bk_obj_name": "test", "bk_obj_id": "test", "service_instance_count": 0, "child": [], "service_template_id": 0, "bk_inst_id": 4, "bk_inst_name": "test", "node_link": ["biz|4", "test|4"], }, } SEARCH_HOST_DATA = { "count": 3, "info": [ { "host": { "bk_cpu": 8, "bk_isp_name": "1", "bk_os_name": "linux centos", "bk_province_name": "440000", "bk_host_id": 1, "import_from": "2", "bk_os_version": "7.4.1708", "bk_disk": 639, "operator": "", "docker_server_version": "1.12.4", "create_time": "2019-05-17T12:40:29.212+08:00", "bk_mem": 32012, "bk_host_name": "VM_1_10_centos", "last_time": "2019-09-11T11:27:37.318+08:00", "bk_host_innerip": "127.0.0.1", "bk_comment": "", "docker_client_version": "1.12.4", "bk_os_bit": "64-bit", "bk_outer_mac": "", "bk_asset_id": "", "bk_service_term": "null", "bk_cloud_id": [ { "bk_obj_name": "", "id": "0", "bk_obj_id": "plat", "bk_obj_icon": "", "bk_inst_id": 0, "bk_inst_name": "default area", } ], "bk_sla": "null", "bk_cpu_mhz": 2499, "bk_host_outerip": "", "bk_state_name": "CN", "bk_os_type": "1", "bk_mac": "52:54:00:0a:ac:26", "bk_bak_operator": "", "bk_supplier_account": "0", "bk_sn": "", "bk_cpu_module": "Intel(R) Xeon(R) Gold 61xx CPU", }, "set": [], "biz": [ { "bk_biz_id": 2, "language": "1", "life_cycle": "2", "bk_biz_developer": "", "bk_biz_maintainer": "admin,jx", "bk_biz_tester": "", "time_zone": "Asia/Shanghai", "default": 0, "create_time": "2019-05-17T12:38:29.549+08:00", "bk_biz_productor": "admin", "bk_supplier_account": "0", "operator": "", "bk_biz_name": "蓝鲸", "last_time": "2019-09-29T10:28:37.748+08:00", "bk_supplier_id": 0, } ], "module": [], }, { "host": { "bk_cpu": 8, "bk_isp_name": "2", "bk_os_name": "linux centos", "bk_province_name": "440000", "bk_host_id": 2, "import_from": "2", "bk_os_version": "7.4.1708", "bk_disk": 147, "operator": "", "docker_server_version": "", "create_time": "2019-05-17T12:40:33.671+08:00", "bk_mem": 32012, "bk_host_name": "VM_1_11_centos", "last_time": "2019-05-17T15:53:41.676+08:00", "bk_host_innerip": "127.0.0.1", "bk_comment": "", "docker_client_version": "", "bk_os_bit": "64-bit", "bk_outer_mac": "", "bk_asset_id": "", "bk_service_term": "null", "bk_cloud_id": [ { "bk_obj_name": "", "id": "0", "bk_obj_id": "plat", "bk_obj_icon": "", "bk_inst_id": 0, "bk_inst_name": "default area", } ], "bk_sla": "null", "bk_cpu_mhz": 1999, "bk_host_outerip": "", "bk_state_name": "CN", "bk_os_type": "1", "bk_mac": "52:54:00:f8:42:96", "bk_bak_operator": "", "bk_supplier_account": "0", "bk_sn": "", "bk_cpu_module": "AMD EPYC Processor", }, "set": [], "biz": [ { "bk_biz_id": 2, "language": "1", 
"life_cycle": "2", "bk_biz_developer": "", "bk_biz_maintainer": "admin,jx", "bk_biz_tester": "", "time_zone": "Asia/Shanghai", "default": 0, "create_time": "2019-05-17T12:38:29.549+08:00", "bk_biz_productor": "admin", "bk_supplier_account": "0", "operator": "", "bk_biz_name": "蓝鲸", "last_time": "2019-09-29T10:28:37.748+08:00", "bk_supplier_id": 0, } ], "module": [], }, { "host": { "bk_cpu": 8, "bk_isp_name": "3", "bk_os_name": "linux centos", "bk_province_name": "440000", "bk_host_id": 3, "import_from": "2", "bk_os_version": "7.4.1708", "bk_disk": 639, "operator": "", "docker_server_version": "1.12.4", "create_time": "2019-05-17T12:40:37.473+08:00", "bk_mem": 32012, "bk_host_name": "rbtnode1", "last_time": "2019-09-11T11:26:43.887+08:00", "bk_host_innerip": "127.0.0.1", "bk_comment": "", "docker_client_version": "1.12.4", "bk_os_bit": "64-bit", "bk_outer_mac": "", "bk_asset_id": "", "bk_service_term": "null", "bk_cloud_id": [ { "bk_obj_name": "", "id": "0", "bk_obj_id": "plat", "bk_obj_icon": "", "bk_inst_id": 0, "bk_inst_name": "default area", } ], "bk_sla": "null", "bk_cpu_mhz": 1999, "bk_host_outerip": "", "bk_state_name": "CN", "bk_os_type": "1", "bk_mac": "52:54:00:f2:b3:a6", "bk_bak_operator": "", "bk_supplier_account": "0", "bk_sn": "", "bk_cpu_module": "AMD EPYC Processor", }, "set": [], "biz": [ { "bk_biz_id": 2, "language": "1", "life_cycle": "2", "bk_biz_developer": "", "bk_biz_maintainer": "admin,jx", "bk_biz_tester": "", "time_zone": "Asia/Shanghai", "default": 0, "create_time": "2019-05-17T12:38:29.549+08:00", "bk_biz_productor": "admin", "bk_supplier_account": "0", "operator": "", "bk_biz_name": "蓝鲸", "last_time": "2019-09-29T10:28:37.748+08:00", "bk_supplier_id": 0, } ], "module": [], }, ], } TASK_DETAIL_DATA = { "status": "FAILED", "task_id": 24626, "finish_time": "null", "start_time": "2019-09-19 15:07:13", "instance_id": "host|instance|host|127.0.0.1-0-0", "pipeline_id": "0242b9eebb0b355aa4f9d6e41acd8d68", "create_time": "2019-09-19 15:07:13", "steps": [ { "status": "FAILED", "target_hosts": [ { "status": "FAILED", "pipeline_id": "cf9b9eb54bf33e1799294e9ce2e2d3ba", "create_time": "2019-09-19 15:07:13", "finish_time": "null", "start_time": "2019-09-19 15:07:14", "node_name": "[unifytlogc] 下发插件配置 0:127.0.0.1", "sub_steps": [ { "status": "SUCCESS", "inputs": {"status": "UNKNOWN", "host_status_id": 105981, "_loop": 0}, "log": "", "index": 0, "finish_time": "2019-09-19 15:07:14", "start_time": "2019-09-19 15:07:14", "node_name": "更新插件部署状态", "pipeline_id": "efe41658ac4d3c0b99dafa134f8e6549", "create_time": "2019-09-19 15:07:13", "ex_data": "null", }, { "status": "SUCCESS", "inputs": { "file_params": [], "host_status_id": 105981, "_loop": 0, "ip_list": [{"ip": "127.0.0.1", "bk_supplier_id": "0", "bk_cloud_id": "0"}], "config_instance_ids": [], "job_client": {"username": "admin", "bk_biz_id": "2", "os_type": "linux"}, "subscription_step_id": 46813, }, "log": "[2019-09-19 15:07:14 INFO] JobPushMultipleConfigFileService called with params", "index": 1, "finish_time": "2019-09-19 15:07:17", "start_time": "2019-09-19 15:07:14", "node_name": "渲染并下发配置", "pipeline_id": "327b1793e3c3300ba683bbfe57985aa6", "create_time": "2019-09-19 15:07:13", "ex_data": "null", }, { "status": "FAILED", "inputs": { "control": { "stop_cmd": "./stop.sh unifytlogc", "health_cmd": "./unifytlogc -z", "reload_cmd": "./reload.sh unifytlogc", "start_cmd": "./start.sh unifytlogc", "version_cmd": "./unifytlogc -v", "kill_cmd": "./stop.sh unifytlogc", "restart_cmd": "./restart.sh unifytlogc", }, "exe_name": "null", 
"_loop": 0, "setup_path": "/usr/local/gse/plugins/bin", "pid_path": "/var/run/gse/unifytlogc.pid", "proc_name": "unifytlogc", "hosts": [{"ip": "127.0.0.1", "bk_cloud_id": 0, "bk_supplier_id": 0}], "gse_client": {"username": "admin", "os_type": "linux"}, }, "log": "[2019-09-19 15:07:17 INFO] GSE register process success", "index": 2, "finish_time": "2019-09-19 15:07:25", "start_time": "2019-09-19 15:07:17", "node_name": "重载插件进程", "pipeline_id": "ecb56b3d98473b7f858919d776c2d58e", "create_time": "2019-09-19 15:07:13", "ex_data": "以下主机操作进程失败:127.0.0.1", }, { "status": "PENDING", "inputs": "null", "log": "", "index": 3, "finish_time": "null", "outputs": "null", "start_time": "null", "node_name": "更新插件部署状态", "pipeline_id": "aaf49227ba8736169ff8fafa5e7cfe42", "create_time": "null", "ex_data": "null", }, ], } ], "finish_time": "null", "start_time": "2019-09-19 15:07:13", "node_name": "[unifytlogc] 下发插件配置", "pipeline_id": "1c5278d835503842b2270e7554b145e0", "create_time": "2019-09-19 15:07:13", "action": "INSTALL", "type": "PLUGIN", "id": "unifytlogc", "extra_info": {}, } ], "instance_info": { "host": { "bk_host_name": "rbtnode1", "bk_supplier_account": "0", "bk_cloud_id": [ { "bk_obj_name": "", "id": "0", "bk_obj_id": "plat", "bk_obj_icon": "", "bk_inst_id": 0, "bk_inst_name": "default area", } ], "bk_host_innerip": "127.0.0.1", }, "service": {}, }, } TASK_STATUS_DATA = [ { "status": "FAILED", "task_id": 24516, "finish_time": None, "start_time": "2019-09-17 19:23:03", "instance_id": "host|instance|host|127.0.0.1-0-0", "pipeline_id": "b087abf3072b3a85a4b00e7a1c3d90c2", "create_time": "2019-09-17 19:23:02", "steps": [ { "status": "FAILED", "target_hosts": [ { "status": "FAILED", "pipeline_id": "ca20fc59e7a138258f4e22beff18aaec", "create_time": "2019-09-17 19:23:02", "finish_time": None, "start_time": "2019-09-17 19:23:04", "node_name": "[unifytlogc] 下发插件配置 0:127.0.0.1", "sub_steps": [ { "status": "SUCCESS", "pipeline_id": "82fbf2706772353b8f59aa61f9084022", "create_time": "2019-09-17 19:23:02", "index": 0, "finish_time": "2019-09-17 19:23:04", "start_time": "2019-09-17 19:23:04", "node_name": "更新插件部署状态", }, { "status": "SUCCESS", "pipeline_id": "5d42f91ef09438729c67fa3add82778a", "create_time": "2019-09-17 19:23:02", "index": 1, "finish_time": "2019-09-17 19:23:07", "start_time": "2019-09-17 19:23:04", "node_name": "渲染并下发配置", }, { "status": "FAILED", "pipeline_id": "da862b51b1c43a2f8876f6c3e311d779", "create_time": "2019-09-17 19:23:02", "index": 2, "finish_time": "2019-09-17 19:23:16", "start_time": "2019-09-17 19:23:07", "node_name": "重载插件进程", }, { "status": "PENDING", "pipeline_id": "9ac0acd362ac388cb1e1cf34635c5991", "create_time": None, "index": 3, "finish_time": None, "start_time": None, "node_name": "更新插件部署状态", }, ], } ], "finish_time": None, "start_time": "2019-09-17 19:23:03", "node_name": "[unifytlogc] 下发插件配置", "pipeline_id": "874164bdd0a7355fb939533d34d063e3", "create_time": "2019-09-17 19:23:02", "action": "INSTALL", "type": "PLUGIN", "id": "unifytlogc", "extra_info": {}, } ], "instance_info": { "host": { "bk_host_name": "rbtnode1", "bk_supplier_account": "0", "bk_cloud_id": [ { "bk_obj_name": "", "id": "0", "bk_obj_id": "plat", "bk_obj_icon": "", "bk_inst_id": 0, "bk_inst_name": "default area", } ], "bk_host_innerip": "127.0.0.1", }, "service": {}, }, } ] CONFIG_DATA = { "data_id_config": {"option": {"encoding": "encoding data"}, "data_name": "data name"}, "result_table_config": "", "subscription_config": [ { "steps": [ { "config": {"plugin_name": LogPluginInfo.NAME, "plugin_version": 
LogPluginInfo.VERSION}, "type": "PLUGIN", "id": LogPluginInfo.NAME, "params": { "context": { "dataid": BK_DATA_ID, "local": [ { "paths": ["testlogic_op"], "delimiter": "|", "filters": [ {"conditions": [{"index": 1, "key": "val1", "op": "="}]}, {"conditions": [{"index": 1, "key": "val1", "op": "="}]}, ], "encoding": "UTF-8", } ], } }, } ] } ], } class CCModuleTest(object): """ mock CCApi.search_module """ def bulk_request(self, params=None): return [] class CCBizHostsTest(object): """ mock CCApi.list_biz_hosts """ def bulk_request(self, params=None): return [] class CCSetTest(object): """ mock CCApi.list_biz_hosts """ def bulk_request(self, params=None): return [] def subscription_statistic(params): return [ { "subscription_id": SUBSCRIPTION_ID, "status": [ {"status": "SUCCESS", "count": 0}, {"status": "PENDING", "count": 0}, {"status": "FAILED", "count": 0}, {"status": "RUNNING", "count": 0}, ], "versions": [], "instances": 0, } ] class TestCollector(TestCase): @patch("apps.api.TransferApi.create_data_id", lambda _: {"bk_data_id": BK_DATA_ID}) @patch("apps.api.TransferApi.create_result_table", lambda _: {"table_id": TABLE_ID}) @patch("apps.api.NodeApi.create_subscription", lambda _: {"subscription_id": SUBSCRIPTION_ID}) @patch("apps.api.NodeApi.subscription_statistic", subscription_statistic) @patch("apps.api.NodeApi.run_subscription_task", lambda _: {"task_id": TASK_ID}) @patch("apps.api.NodeApi.switch_subscription", lambda _: {}) @patch("apps.api.NodeApi.check_subscription_task_ready", lambda _: True) @patch("apps.api.TransferApi.modify_data_id", lambda _: {"bk_data_id": BK_DATA_ID}) @patch("apps.api.CCApi.search_module", CCModuleTest()) @patch("apps.api.CCApi.list_biz_hosts", CCBizHostsTest()) @patch("apps.decorators.user_operation_record.delay", return_value=None) @override_settings(CACHES={"default": {"BACKEND": "django.core.cache.backends.dummy.DummyCache"}}) def test_create(self, *args, **kwargs): params = copy.deepcopy(PARAMS) params = custom_params_valid(serializer=CollectorCreateSerializer, params=params) params["params"]["conditions"]["type"] = "separator" result = CollectorHandler().update_or_create(params) self.assertEqual(result["bk_data_id"], BK_DATA_ID) self.assertEqual(result["collector_config_name"], params["collector_config_name"]) self.assertEqual(result["subscription_id"], SUBSCRIPTION_ID) self.assertEqual(result["task_id_list"], [str(TASK_ID)]) self._test_retrieve(result["collector_config_id"]) self._test_update(result["collector_config_id"]) self._test_run_subscription_task(result["collector_config_id"]) self._test_start(result["collector_config_id"]) self._test_retry_target_nodes(result["collector_config_id"]) self._test_delete_subscription(result["collector_config_id"]) self._test_get_target_mapping(result["collector_config_id"]) self._test_get_subscription_status(result["collector_config_id"]) self._test_get_subscription_task_detail(result["collector_config_id"]) self._test_get_subscription_task_status(result["collector_config_id"]) self._test_stop(result["collector_config_id"]) self._test_destroy(result["collector_config_id"]) @patch("apps.api.TransferApi.modify_data_id", lambda _: {"bk_data_id": BK_DATA_ID}) @patch("apps.api.TransferApi.modify_result_table", lambda _: {"table_id": TABLE_ID}) @patch("apps.api.NodeApi.update_subscription_info", lambda _: {"subscription_id": SUBSCRIPTION_ID}) @patch("apps.api.NodeApi.run_subscription_task", lambda _: {"task_id": NEW_TASK_ID}) def _test_update(self, collector_config_id): params = copy.deepcopy(PARAMS) 
params["collector_config_id"] = collector_config_id new_collector_config_name = "新的名字" params["collector_config_name"] = new_collector_config_name params["target_nodes"] = [{"bk_inst_id": 34, "bk_obj_id": "module", "ip": "127.0.0.1", "bk_cloud_id": 1}] with self.assertRaises(CollectorConfigNotExistException): CollectorHandler(collector_config_id=9999) collector = CollectorHandler(collector_config_id=collector_config_id) result = collector.update_or_create(params) self.assertEqual(result["collector_config_name"], new_collector_config_name) self.assertListEqual( collector.data.target_subscription_diff, [ {"type": "add", "bk_inst_id": 34, "bk_obj_id": "module"}, {"type": "delete", "bk_inst_id": 33, "bk_obj_id": "module"}, ], ) @patch("apps.utils.thread.MultiExecuteFunc.append") @patch("apps.utils.thread.MultiExecuteFunc.run") @patch("apps.api.CCApi.search_biz_inst_topo", lambda _: []) @patch("apps.api.CCApi.search_set", CCSetTest()) def _test_retrieve(self, collector_config_id, mock_run, mock_append): collector = CollectorHandler(collector_config_id=collector_config_id) mock_append.return_value = "" mock_run.return_value = CONFIG_DATA result = collector.retrieve() self.assertEqual(result.get("data_encoding"), "UTF-8") self.assertIsNone(result.get("storage_cluster_id")) self.assertIsNone(result.get("retention")) self.assertEqual(result.get("collector_config_id"), collector_config_id) self.assertEqual(result.get("collector_scenario_id"), "row") @patch("apps.api.NodeApi.run_subscription_task", lambda _: {"task_id": LAST_TASK_ID}) def _test_run_subscription_task(self, collector_config_id): target_nodes = [{"ip": "127.0.0.1", "bk_cloud_id": 0}] # 指定订阅节点 collector1 = CollectorHandler(collector_config_id=collector_config_id) task_id_one = copy.deepcopy(collector1.data.task_id_list) task_id_one.append(str(LAST_TASK_ID)) result1 = collector1._run_subscription_task("START", target_nodes) self.assertEqual(result1, task_id_one) @patch("apps.api.NodeApi.run_subscription_task", lambda _: {"task_id": 6}) def _test_start(self, collector_config_id): collector = CollectorHandler(collector_config_id=collector_config_id) result = collector.start() self.assertEqual(result, ["6"]) @patch("apps.api.NodeApi.run_subscription_task", lambda _: {"task_id": 7}) def _test_stop(self, collector_config_id): collector = CollectorHandler(collector_config_id=collector_config_id) result = collector.stop() self.assertEqual(result, ["7"]) @patch("apps.api.NodeApi.run_subscription_task", lambda _: {"task_id": 8}) def _test_retry_target_nodes(self, collector_config_id): collector = CollectorHandler(collector_config_id=collector_config_id) task_id_list = copy.deepcopy(collector.data.task_id_list) task_id_list.append("8") target_nodes = [{"ip": "127.0.0.1", "bk_cloud_id": 0}] result = collector.retry_target_nodes(target_nodes) self.assertEqual(result, task_id_list) @patch("apps.api.NodeApi.delete_subscription", lambda _: DELETE_MSG) def _test_delete_subscription(self, collector_config_id): collector = CollectorHandler(collector_config_id=collector_config_id) result = collector._delete_subscription() self.assertTrue(result.get("result")) @patch("apps.api.NodeApi.run_subscription_task", lambda _: {"task_id": 8}) @patch("apps.api.NodeApi.delete_subscription", lambda _: DELETE_MSG) def _test_destroy(self, collector_config_id): collector = CollectorHandler(collector_config_id=collector_config_id) collector.destroy() with self.assertRaises(CollectorConfigNotExistException): CollectorHandler(collector_config_id=collector_config_id) def 
test_format_subscription_instance_status(self): result = CollectorHandler.format_subscription_instance_status(PART_FAILED_INSTANCE_DATA) self.assertEqual(result, STATUS_DATA_RETURN) @patch("apps.api.CCApi.search_biz_inst_topo", lambda _: TOPO_TREE) @patch("apps.api.NodeApi.get_subscription_instance_status", lambda _: STATUS_DATA) def _test_get_subscription_status(self, collector_config_id): collector = CollectorHandler(collector_config_id=collector_config_id) # 采集目标是HOST-INSTANCE collector.data.target_node_type = "INSTANCE" result = collector.get_subscription_status() self.assertFalse(result["contents"][0]["is_label"]) self.assertEqual(result["contents"][0]["bk_obj_name"], "主机") self.assertEqual(result["contents"][0]["node_path"], "主机") self.assertEqual(result["contents"][0]["bk_obj_id"], "host") # 如果采集目标是HOST-TOPO collector.data.target_node_type = "TOPO" result2 = collector.get_subscription_status() self.assertFalse(result2["contents"][0]["is_label"]) self.assertEqual(result2["contents"][0]["bk_obj_id"], "module") self.assertEqual(result2["contents"][0]["bk_inst_id"], 34) def test_get_node_mapping(self): result = CollectorHandler().get_node_mapping(TOPO_TREE) self.assertEqual(result, TOPO_TREE_RETURN) def _test_get_target_mapping(self, collector_config_id): collector = CollectorHandler(collector_config_id=collector_config_id) collector.data.target_subscription_diff = [ {"type": "add", "bk_inst_id": 2, "bk_obj_id": "biz"}, {"type": "add", "bk_inst_id": 3, "bk_obj_id": "module"}, {"type": "delete", "bk_inst_id": 4, "bk_obj_id": "set"}, {"type": "modify", "bk_inst_id": 5, "bk_obj_id": "module"}, ] result = collector.get_target_mapping() self.assertEqual({"module|5": "modify", "set|4": "delete", "module|3": "add", "biz|2": "add"}, result) @patch("apps.api.NodeApi.get_subscription_instance_status", lambda _: [PART_FAILED_INSTANCE_DATA]) def _test_get_part_failed_subscription_status(self, collector_config_id): result = CollectorHandler().get_subscription_status_by_list([collector_config_id]) self.assertEqual( result, [ { "collector_id": collector_config_id, "subscription_id": SUBSCRIPTION_ID, "status": "", "status_name": "", "total": 0, "success": 0, "failed": 0, "pending": 0, } ], ) @patch("apps.api.NodeApi.get_subscription_instance_status", lambda _: [FAILED_INSTANCE_DATA]) def _test_get_failed_subscription_status(self, collector_config_id): result = CollectorHandler().get_subscription_status_by_list([collector_config_id]) self.assertEqual( result, [ { "collector_id": collector_config_id, "subscription_id": 2, "status": "", "status_name": "", "total": 0, "success": 0, "failed": 0, "pending": 0, } ], ) @patch("apps.api.NodeApi.get_subscription_instance_status", lambda _: [SUCCESS_INSTANCE_DATA]) def _test_get_success_subscription_status(self, collector_config_id): result = CollectorHandler().get_subscription_status_by_list([collector_config_id]) self.assertEqual( result, [ { "collector_id": collector_config_id, "subscription_id": 2, "status": "", "status_name": "正常", "total": 1, "success": 1, "failed": 0, "pending": 0, } ], ) @patch("apps.api.NodeApi.get_subscription_instance_status", lambda _: [RUNNING_INSTANCE_DATA]) def _test_get_running_subscription_status(self, collector_config_id): result = CollectorHandler().get_subscription_status_by_list([collector_config_id]) self.assertEqual( result, [ { "collector_id": collector_config_id, "subscription_id": 2, "status": "RUNNING", "status_name": "部署中", "total": 1, "success": 0, "failed": 0, "pending": 1, } ], ) 
@patch("apps.api.NodeApi.get_subscription_task_detail", lambda _: TASK_DETAIL_DATA) def _test_get_subscription_task_detail(self, collector_config_id): collector = CollectorHandler(collector_config_id=collector_config_id) result = collector.get_subscription_task_detail("host|instance|host|127.0.0.1-0-0", task_id="24626") for i in ["unifytlogc", "下发插件配置", "更新插件部署状态", "渲染并下发配置", "重载插件进程"]: self.assertIn(i, result["log_detail"]) self.assertEqual(result.get("log_result").get("status"), "FAILED") self.assertEqual(result.get("log_result").get("task_id"), 24626) self.assertEqual(result.get("log_result").get("instance_id"), "host|instance|host|127.0.0.1-0-0") def test_get_instance_log(self): result = CollectorHandler.get_instance_log(TASK_DETAIL_DATA) result2 = CollectorHandler.get_instance_log({"steps": []}) self.assertEqual(result, "[unifytlogc] 下发插件配置-重载插件进程") self.assertEqual(result2, "") @patch("apps.decorators.user_operation_record.delay", return_value=True) @patch("apps.api.NodeApi.switch_subscription", lambda _: {}) @patch("apps.api.NodeApi.subscription_statistic", subscription_statistic) def test_format_task_instance_status(self, *args, **kwargs): _, create_result = TestCollectorHandler.create() collector_config_id = create_result["collector_config_id"] result = CollectorHandler(collector_config_id=collector_config_id).format_task_instance_status( [TASK_DETAIL_DATA] ) self.assertEqual(result[0]["status"], "FAILED") self.assertEqual(result[0]["ip"], "127.0.0.1") self.assertEqual(result[0]["log"], "[unifytlogc] 下发插件配置-重载插件进程") self.assertEqual(result[0]["instance_id"], "host|instance|host|127.0.0.1-0-0") self.assertEqual(result[0]["instance_name"], "127.0.0.1") self.assertEqual(result[0]["task_id"], 24626) @patch("apps.api.CCApi.search_biz_inst_topo", lambda _: TOPO_TREE) @patch("apps.api.NodeApi.get_subscription_task_status", lambda _: [TASK_DETAIL_DATA]) def _test_get_subscription_task_status(self, collector_config_id): collector = CollectorHandler(collector_config_id=collector_config_id) # 采集目标是HOST-TOPO result = collector.get_subscription_task_status(collector.data.task_id_list) self.assertTrue(result["contents"][0]["is_label"]) self.assertEqual(result["contents"][0]["label_name"], "add") self.assertEqual(result["contents"][0]["bk_obj_id"], "module") self.assertEqual(result["contents"][0]["bk_inst_id"], 34) # 采集目标是HOST-INSTANCE collector.data.target_node_type = "INSTANCE" result2 = collector.get_subscription_task_status(collector.data.task_id_list) self.assertFalse(result2["contents"][0]["is_label"]) self.assertEqual(result2["contents"][0]["bk_obj_name"], "主机") self.assertEqual(result2["contents"][0]["node_path"], "主机") self.assertEqual(result2["contents"][0]["bk_obj_id"], "host") def test_check_task_ready_exception(self): self.assertEqual(CollectorHandler._check_task_ready_exception(ApiRequestError("test1", 111)), True) self.assertEqual( CollectorHandler._check_task_ready_exception(ApiResultError("test2", code=1306201, errors="test2")), True ) with self.assertRaises(BaseException): CollectorHandler._check_task_ready_exception(ApiResultError("test2", code=111, errors="test2")) with self.assertRaises(BaseException): CollectorHandler._check_task_ready_exception(BaseException())
import httplib2
from bs4 import BeautifulSoup, SoupStrainer
import urllib.request, urllib.error
import os
import re
import sys

# Usage: python <script>.py [year] [conference], e.g. python <script>.py 2016 cvpr

def get(url):
    http = httplib2.Http(".cache", disable_ssl_certificate_validation=True)
    status, response = http.request(url)
    return response

def getlinks(url):
    return BeautifulSoup(get(url), "html.parser", parse_only=SoupStrainer('a'))

def pdfname(file_url, save_folder):
    start_index = file_url.rfind("/") + 1
    return save_folder + "/" + file_url[start_index:]

def savepdf(link, base_url, save_folder):
    if link != "#" and link.endswith('pdf'):
        outfilename = pdfname(link, save_folder)
        if not os.path.exists(outfilename):
            pdf = urllib.request.urlopen(base_url + link).read()
            with open(outfilename, 'wb') as f:
                f.write(pdf)

year = 2016
conference = "cvpr"
argc = len(sys.argv)
if argc > 1:
    year = int(sys.argv[1])
if argc > 2:
    conference = sys.argv[2]

save_folder = conference + str(year)
if not os.path.exists(save_folder):
    os.mkdir(save_folder)

if conference == "cvpr":
    base_url = 'https://openaccess.thecvf.com/'
    url = base_url + 'CVPR%d?day=all' % year
    # print(get(url))
    links = getlinks(url)
    # print(links)
    for link in links:
        if link.has_attr('href'):
            savepdf(link['href'], base_url, save_folder)
elif conference == "iccv":
    base_url = 'https://openaccess.thecvf.com/'
    links = getlinks(base_url + 'ICCV%d' % year)
    for link in links:
        if link.has_attr('href'):
            savepdf(link['href'], base_url, save_folder)
elif conference == "nips":
    base_url = 'https://papers.nips.cc/'
    links = getlinks(base_url)
    for l in links:
        if len(re.findall(str(year), l.text)) > 0:
            turl = l['href']
            links_of_year = getlinks(base_url + turl)
            print(len(links_of_year))
            for l in links_of_year:
                links_of_a_paper = getlinks(base_url + l['href'])
                for link in links_of_a_paper:
                    if link.has_attr('href'):
                        savepdf(link['href'], base_url, save_folder)
else:
    print("not supported conference: %s" % conference)
from django.contrib import admin from birth_rate_app.models import ( Hospital, Birth, ) # Register your models here. admin.site.register(Hospital) admin.site.register(Birth)
# Standard Library import random from collections import defaultdict from copy import copy # Third Party import numpy as np import networkx as nx import matplotlib.pyplot as plt from matplotlib.path import Path from matplotlib.patches import PathPatch from matplotlib import cm from scipy.interpolate import splprep, splev from scipy.spatial import ConvexHull ################## # COMMUNITY LAYOUT ################## def _inter_community_edges(G, partition): edges = defaultdict(list) for (i, j) in G.edges(): c_i = partition[i] c_j = partition[j] if c_i == c_j: continue edges[(c_i, c_j)].append((i, j)) return edges def _position_communities(G, partition, **kwargs): hypergraph = nx.Graph() hypergraph.add_nodes_from(set(partition)) inter_community_edges = _inter_community_edges(G, partition) for (c_i, c_j), edges in inter_community_edges.items(): hypergraph.add_edge(c_i, c_j, weight=len(edges)) pos_communities = nx.spring_layout(hypergraph, **kwargs) # Set node positions to positions of its community pos = dict() for node, community in enumerate(partition): pos[node] = pos_communities[community] return pos def _position_nodes(G, partition, **kwargs): communities = defaultdict(list) for node, community in enumerate(partition): communities[community].append(node) pos = dict() for c_i, nodes in communities.items(): subgraph = G.subgraph(nodes) pos_subgraph = nx.spring_layout(subgraph, **kwargs) pos.update(pos_subgraph) return pos # Adapted from: https://stackoverflow.com/questions/43541376/how-to-draw-communities-with-networkx def community_layout(G, partition): pos_communities = _position_communities(G, partition, scale=10.0) pos_nodes = _position_nodes(G, partition, scale=2.0) # Combine positions pos = dict() for node in G.nodes(): pos[node] = pos_communities[node] + pos_nodes[node] return pos ######### # PATCHES ######### def _node_coordinates(nodes): collection = copy(nodes) collection.set_offset_position("data") return collection.get_offsets() def _convex_hull_vertices(node_coordinates, community): points = np.array(node_coordinates[list(community)]) hull = ConvexHull(points) x, y = points[hull.vertices, 0], points[hull.vertices, 1] vertices = np.column_stack((x, y)) return vertices # https://en.wikipedia.org/wiki/Shoelace_formula#Statement def _convex_hull_area(vertices): A = 0.0 for i in range(-1, vertices.shape[0] - 1): A += vertices[i][0] * (vertices[i + 1][1] - vertices[i - 1][1]) return A / 2 # https://en.wikipedia.org/wiki/Centroid#Of_a_polygon def _convex_hull_centroid(vertices): A = _convex_hull_area(vertices) c_x, c_y = 0.0, 0.0 for i in range(vertices.shape[0]): x_i, y_i = vertices[i] if i == vertices.shape[0] - 1: x_i1, y_i1 = vertices[0] else: x_i1, y_i1 = vertices[i + 1] cross = ((x_i * y_i1) - (x_i1 * y_i)) c_x += (x_i + x_i1) * cross c_y += (y_i + y_i1) * cross return c_x / (6 * A), c_y / (6 * A) def _scale_convex_hull(vertices, offset): c_x, c_y = _convex_hull_centroid(vertices) for i, vertex in enumerate(vertices): v_x, v_y = vertex if v_x > c_x: vertices[i][0] += offset else: vertices[i][0] -= offset if v_y > c_y: vertices[i][1] += offset else: vertices[i][1] -= offset return vertices def _community_patch(vertices): vertices = _scale_convex_hull(vertices, 1) # TODO: Make offset dynamic tck, u = splprep(vertices.T, u=None, s=0.0, per=1) u_new = np.linspace(u.min(), u.max(), 1000) x_new, y_new = splev(u_new, tck, der=0) path = Path(np.column_stack((x_new, y_new))) patch = PathPatch(path, alpha=0.50, linewidth=0.0) return patch def draw_community_patches(nodes, communities, 
axes): node_coordinates = _node_coordinates(nodes) vertex_sets = [] for c_i, community in enumerate(communities): vertices = _convex_hull_vertices(node_coordinates, community) patch = _community_patch(vertices) patch.set_facecolor(nodes.to_rgba(c_i)) axes.add_patch(patch) vertex_sets.append(patch.get_path().vertices) _vertices = np.concatenate(vertex_sets) xlim = [_vertices[:, 0].min(), _vertices[:, 0].max()] ylim = [_vertices[:, 1].min(), _vertices[: ,1].max()] axes.set_xlim(xlim) axes.set_ylim(ylim) ################## # DRAW COMMUNITIES ################## def draw_communities(adj_matrix, communities, dark=False, filename=None, dpi=None, seed=1): np.random.seed(seed) random.seed(seed) G = nx.from_numpy_matrix(adj_matrix) partition = [0 for _ in range(G.number_of_nodes())] for c_i, nodes in enumerate(communities): for i in nodes: partition[i] = c_i plt.rcParams["figure.facecolor"] = "black" if dark else "white" plt.rcParams["axes.facecolor"] = "black" if dark else "white" fig, ax = plt.subplots(figsize=(8, 6)) ax.axis("off") node_size = 10200 / G.number_of_nodes() linewidths = 34 / G.number_of_nodes() pos = community_layout(G, partition) nodes = nx.draw_networkx_nodes( G, pos=pos, node_color=partition, linewidths=linewidths, cmap=cm.jet, ax=ax ) nodes.set_edgecolor("w") edges = nx.draw_networkx_edges( G, pos=pos, edge_color=(1.0, 1.0, 1.0, 0.75) if dark else (0.6, 0.6, 0.6, 1.0), width=linewidths, ax=ax ) draw_community_patches(nodes, communities, ax) if not filename: plt.show() else: plt.savefig(filename, dpi=dpi) return ax
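

# --- Illustrative usage (not part of the original module) ---
# Drives draw_communities with a synthetic modular graph. The generator,
# block sizes, and edge probabilities are arbitrary demo choices, not taken
# from the original code.
if __name__ == '__main__':
    G_demo = nx.planted_partition_graph(3, 10, 0.9, 0.05, seed=1)
    adj = nx.to_numpy_array(G_demo)
    demo_communities = [list(range(i * 10, (i + 1) * 10)) for i in range(3)]
    draw_communities(adj, demo_communities, seed=1)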
from __future__ import absolute_import import mock import unittest from . import read_file from nose_parameterized import parameterized from pynetlib.route import Route from pynetlib.exceptions import ObjectNotFoundException, ObjectAlreadyExistsException class TestRoute(unittest.TestCase): def setUp(self): self.ip_route_list_output = read_file('ip_route_list') def test_init_route(self): destination = 'destination' device = 'eth0' route = Route(destination, device) self.assertIsNone(route.metric) self.assertIsNone(route.source) self.assertIsNone(route.gateway) self.assertIsNone(route.namespace) @parameterized.expand([ ('1.2.1.2/30', None, None, None, None, None, False, False, False), ('1.2.3.4/30', None, None, None, None, None, False, True, True), ('default', 'wlo1', None, None, '600', '192.168.0.254', True, False, True), ('169.254.0.0/16', 'docker0', 'link', None, '1000', None, False, False, True), ('172.17.0.0/16', 'docker0', 'link', '172.17.0.1', None, None, False, False, True), ('192.168.0.0/24', 'wlo1', 'link', '192.168.0.11', '600', None, False, False, True) ]) @mock.patch('pynetlib.route.execute_command') def test_route_discovery(self, destination, device, scope, source, metric, gateway, default, prohibit, reachable, execute_command): execute_command.return_value = self.ip_route_list_output route = Route(destination, device) route.scope = scope route.source = source route.gateway = gateway route.metric = metric routes = Route.discover() self.assertEqual(len(routes), 6) found_route = routes[routes.index(route)] self.assertTrue(self.deep_equality(route, found_route)) self.assertEqual(found_route.is_default(), default) self.assertEqual(found_route.is_prohibited(), prohibit) self.assertEqual(found_route.is_reachable(), reachable) @mock.patch('pynetlib.route.execute_command') def test_refresh_route(self, execute_command): execute_command.return_value = self.ip_route_list_output route = Route('default', 'wlo1') route.refresh() execute_command.assert_called_once_with("ip route list", namespace=None) self.assertEqual(route.metric, '600') self.assertEqual(route.gateway, '192.168.0.254') self.assertIsNone(route.source) self.assertIsNone(route.scope) self.assertIsNone(route.namespace) @mock.patch('pynetlib.route.execute_command') def test_refresh_non_existing_route(self, execute_command): execute_command.return_value = self.ip_route_list_output route = Route('destination', 'device') with self.assertRaises(ObjectNotFoundException): route.refresh() execute_command.assert_called_once_with("ip route list", namespace=None) @mock.patch('pynetlib.route.execute_command') def test_contains_route(self, execute_command): execute_command.return_value = self.ip_route_list_output route = Route('default', 'wlo1') route.gateway = '192.168.0.254' self.assertTrue(route.exists()) execute_command.assert_called_once_with('ip route list', namespace=None) @mock.patch('pynetlib.route.execute_command') def test_add_existing_route(self, execute_command): execute_command.return_value = self.ip_route_list_output route = Route('default', 'wlo1') route.gateway = '192.168.0.254' with self.assertRaises(ObjectAlreadyExistsException): route.create() execute_command.assert_called_once_with('ip route list', namespace=None) @mock.patch('pynetlib.route.execute_command') def test_add_non_existing_route(self, execute_command): execute_command.return_value = self.ip_route_list_output route = Route('1.2.3.4', 'wlo1') route.gateway = '192.168.0.254' route.create() execute_command.assert_called_with('ip route add 1.2.3.4 dev wlo1 via 
192.168.0.254', namespace=None) @mock.patch('pynetlib.route.execute_command') def test_delete_existing_route(self, execute_command): execute_command.return_value = self.ip_route_list_output route = Route('default', 'wlo1') route.gateway = '192.168.0.254' route.delete() execute_command.assert_called_with('ip route del default', namespace=None) @mock.patch('pynetlib.route.execute_command') def test_delete_non_existing_route(self, execute_command): execute_command.return_value = self.ip_route_list_output route = Route('1.2.3.4', 'wlo1') route.gateway = '192.168.0.254' with self.assertRaises(ObjectNotFoundException): route.delete() execute_command.assert_called_with('ip route del 1.2.3.4', namespace=None) @mock.patch('pynetlib.route.execute_command') def test_prohibit_non_existing_route(self, execute_command): execute_command.return_value = self.ip_route_list_output route = Route('10.10.10.10/24', 'wlo1') route.prohibit() execute_command.assert_called_with('ip route add prohibit 10.10.10.10/24', namespace=None) @mock.patch('pynetlib.route.execute_command') def test_prohibit_existing_route(self, execute_command): execute_command.return_value = self.ip_route_list_output route = Route('1.2.3.4/30', None) with self.assertRaises(ObjectAlreadyExistsException): route.prohibit() execute_command.assert_called_with('ip route del 1.2.3.4', namespace=None) @mock.patch('pynetlib.route.execute_command') def test_unreachable_non_existing_route(self, execute_command): execute_command.return_value = self.ip_route_list_output route = Route('10.10.10.10/24', 'wlo1') route.unreachable() execute_command.assert_called_with('ip route add unreachable 10.10.10.10/24', namespace=None) @mock.patch('pynetlib.route.execute_command') def test_unreachable_existing_route(self, execute_command): execute_command.return_value = self.ip_route_list_output route = Route('1.2.3.4/30', None) with self.assertRaises(ObjectAlreadyExistsException): route.unreachable() execute_command.assert_called_with('ip route del 1.2.3.4', namespace=None) def deep_equality(self, expected, actual): return expected.destination == actual.destination \ and expected.device == actual.device \ and expected.scope == actual.scope \ and expected.source == actual.source \ and expected.metric == actual.metric \ and expected.gateway == actual.gateway
variable = 1
print(variable)
import sys

import pygame

from get_ip import check_for_internet_conection as get_ip
from renderer import draw_everything
from time import sleep

from PodSixNet.Connection import connection, ConnectionListener


class ClientMaze(ConnectionListener):
    def __init__(self, host, port):
        try:
            pygame.init()
            self.Connect((host, int(port)))
            self.screen = pygame.display.set_mode((800, 600))
        except Exception:
            sys.exit()

    def Loop(self):
        connection.Send({'action': 'print_game_state'})
        connection.Pump()
        self.Pump()

    def Network_connected(self, data):
        print("You are now connected to the server")

    def Network_error(self, data):
        print("error: {0}".format(data['error'][1]))
        connection.Close()

    def Network_disconnected(self, data):
        print('Server disconnected')
        sys.exit()

    def Network_render_game_state(self, data):
        keys = pygame.key.get_pressed()
        if data['player_wins']:
            sys.exit()
        elif data['time_is_up']:
            sys.exit()
        if keys[pygame.K_LEFT]:
            connection.Send({'action': 'player_move', 'move': 'left'})
            sleep(0.1)
        if keys[pygame.K_RIGHT]:
            connection.Send({'action': 'player_move', 'move': 'right'})
            sleep(0.1)
        if keys[pygame.K_DOWN]:
            connection.Send({'action': 'player_move', 'move': 'down'})
            sleep(0.1)
        if keys[pygame.K_UP]:
            connection.Send({'action': 'player_move', 'move': 'up'})
            sleep(0.1)

        draw_everything(self.screen, data['objects'])

        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                sys.exit()

        pygame.display.update()


if __name__ == "__main__":
    host = get_ip()
    s = ClientMaze(host, 31425)
    # keep pumping the connection; a single Loop() call would exit immediately
    while True:
        s.Loop()
        sleep(0.001)
from django.urls import include, path
from django.views.generic import TemplateView

urlpatterns = [
    path("tz_detect/", include("tz_detect.urls")),
    path("", TemplateView.as_view(template_name="index.html")),
]
""" User enters the Table %, Depth %, length, and width numbers to get rated diamond quality obtained via binary search. Quality: Excellent, Very Good, Good, Fair, Poor Table%: 53-63, 52 or 64-65, 51 or 66-68, 50 or 69-70, <50 or >70 Depth%: 58-62, 56-57.9 or 62.1-66, 53-55.9 or 66.1-71, 50-52.9 or 71.1-74, <50 or >74 Len/Wid: 1.35-1.50, 1.30-1.34 or 1.51-1.55, 1.25-1.29 or 1.56-1.60, 1.20-1.24 or 1.61-1.65, <1.20 or >1.65 Reference: https://www.diamonds.pro/education/oval-cut/ Creator: Kenny Hoang MIT License """ import unittest # Sorted metric list for each diamond property POOR, FAIR, GOOD, VERY_GOOD, EXCELLENT = "Poor", "Fair", "Good", "Very Good", "Excellent" REFERENCE = { "table": [ (POOR, 50.0), (FAIR, 51.0), (GOOD, 52.0), (VERY_GOOD, 53.0), (EXCELLENT, 63.0), (VERY_GOOD, 65.0), (GOOD, 68.0), (FAIR, 70.0), (POOR, float("inf"))], "depth": [ (POOR, 50.0), (FAIR, 52.9), (GOOD, 55.9), (VERY_GOOD, 57.9), (EXCELLENT, 62.0), (VERY_GOOD, 66.0), (GOOD, 71.0), (FAIR, 74.0), (POOR, float("inf"))], "length_width": [ (POOR, 1.2), (FAIR, 1.24), (GOOD, 1.29), (VERY_GOOD, 1.34), (EXCELLENT, 1.5), (VERY_GOOD, 1.55), (GOOD, 1.6), (FAIR, 1.65), (POOR, float("inf"))] } def get_rated_quality(metric_list, user_input): """ Do a binary search on a sorted metric list """ length = len(metric_list) low, mid, high = 0, length//2, length - 1 while high - low > 1: quality_value = metric_list[mid][1] if user_input > quality_value: low = mid # edge case where a very high value (poor quality) # doesn't print because mid will never = high value if # mid = ((high - low) // 2) + low mid = high - ((high - low) // 2) elif user_input < quality_value: high = mid mid = ((high - low) // 2) + low else: break return metric_list[mid][0] def get_diamond_quality(answer=None): test = True if answer else False questions = ("What is the Table Percentage? ", "What is the Depth Percentage? ", "What is the length of diamond? ", "What is the width of diamond? ") if not test: value = 0 # get property values for q in questions: # repeat question for answers <= 0 while not value or value < 0: value = float(input(q)) answer.append(value) # calculate length:width ratio answer[2] = answer[2]/answer[3] # store values for testing ret = [] if not test: print("\nScale: Excellent, Very Good, Good, Fair, Poor") for ans, (diamond_prop, metric_list) in zip(answer[:3], REFERENCE.items()): quality_level = get_rated_quality(metric_list, ans) if test: ret.append(quality_level) continue print("{} is {}, {}".format(diamond_prop, quality_level, ans)) return ret if __name__ == '__main__': get_diamond_quality() class OutputCheck(unittest.TestCase): """ Tests """ def test_get_diamond_quality(self): testcases = ( # cases for very high and low values (outer ends of the scale) ([0, 0, 0, 1], [POOR, POOR, POOR]), ([100, 100, 1001, 1], [POOR, POOR, POOR], # cases for values exactly at a specific metric ([50, 50, 1.2, 1], [POOR, POOR, POOR]), ([53, 57.9, 1.34, 1], [VERY_GOOD, VERY_GOOD, VERY_GOOD]), ([63, 62, 1.5, 1], [EXCELLENT, EXCELLENT, EXCELLENT])) ) for test in testcases: self.assertEqual(get_diamond_quality(test[0]), test[1])
import requests, json


def geturl2(**kwargs):
    if kwargs.get("mcversion"):
        url = f"https://nitroxenon-minecraft-forge-v1.p.rapidapi.com/optifine/{kwargs.get('mcversion')}"
        headers = {
            'x-rapidapi-key': "a6f51f9ea2mshf179951f6fc0d97p1b476ejsndba62ed12b1d",
            'x-rapidapi-host': "nitroxenon-minecraft-forge-v1.p.rapidapi.com"
        }
        try:
            r = requests.get(url, headers=headers, timeout=5)
            data = json.loads(r.text)
            liURL = []
            for info in data:
                url = f'https://optifine.net/download?f={info.get("filename")}'
                liURL.append(url)
                if kwargs.get('single'):
                    break
            return liURL
        except requests.exceptions.Timeout:
            print("Connection Timeout! Retrying...")
            # re-expand the keyword arguments and propagate the retried result
            return geturl2(**kwargs)
    else:
        raise ValueError("Missing Argument: MCVersion!")
# Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved. # The scope of the test is to verify that code nested SDFGs with a unique name is generated only once # The nested SDFG compute vector addition on FPGA, with vectorization import dace import numpy as np import argparse import subprocess from dace.memlet import Memlet def make_vecAdd_sdfg(sdfg_name: str, dtype=dace.float32): vecWidth = 4 n = dace.symbol("size") vecAdd_sdfg = dace.SDFG(sdfg_name) vecType = dace.vector(dtype, vecWidth) x_name = "x" y_name = "y" z_name = "z" ########################################################################### # Copy data to FPGA copy_in_state = vecAdd_sdfg.add_state("copy_to_device") vecAdd_sdfg.add_array(x_name, shape=[n / vecWidth], dtype=vecType) vecAdd_sdfg.add_array(y_name, shape=[n / vecWidth], dtype=vecType) in_host_x = copy_in_state.add_read(x_name) in_host_y = copy_in_state.add_read(y_name) vecAdd_sdfg.add_array("device_x", shape=[n / vecWidth], dtype=vecType, storage=dace.dtypes.StorageType.FPGA_Global, transient=True) vecAdd_sdfg.add_array("device_y", shape=[n / vecWidth], dtype=vecType, storage=dace.dtypes.StorageType.FPGA_Global, transient=True) in_device_x = copy_in_state.add_write("device_x") in_device_y = copy_in_state.add_write("device_y") copy_in_state.add_memlet_path(in_host_x, in_device_x, memlet=Memlet.simple( in_host_x, "0:{}/{}".format(n, vecWidth))) copy_in_state.add_memlet_path(in_host_y, in_device_y, memlet=Memlet.simple( in_host_y, "0:{}/{}".format(n, vecWidth))) ########################################################################### # Copy data from FPGA vecAdd_sdfg.add_array(z_name, shape=[n / vecWidth], dtype=vecType) copy_out_state = vecAdd_sdfg.add_state("copy_to_host") vecAdd_sdfg.add_array("device_z", shape=[n / vecWidth], dtype=vecType, storage=dace.dtypes.StorageType.FPGA_Global, transient=True) out_device = copy_out_state.add_read("device_z") out_host = copy_out_state.add_write(z_name) copy_out_state.add_memlet_path(out_device, out_host, memlet=Memlet.simple( out_host, "0:{}/{}".format(n, vecWidth))) ######################################################################## # FPGA State fpga_state = vecAdd_sdfg.add_state("fpga_state") x = fpga_state.add_read("device_x") y = fpga_state.add_read("device_y") z = fpga_state.add_write("device_z") # ---------- ---------- # COMPUTE # ---------- ---------- vecMap_entry, vecMap_exit = fpga_state.add_map( 'vecAdd_map', dict(i='0:{0}/{1}'.format(n, vecWidth)), schedule=dace.dtypes.ScheduleType.FPGA_Device) vecAdd_tasklet = fpga_state.add_tasklet('vecAdd_task', ['x_con', 'y_con'], ['z_con'], 'z_con = x_con + y_con') fpga_state.add_memlet_path(x, vecMap_entry, vecAdd_tasklet, dst_conn='x_con', memlet=dace.Memlet.simple(x.data, "i")) fpga_state.add_memlet_path(y, vecMap_entry, vecAdd_tasklet, dst_conn='y_con', memlet=dace.Memlet.simple(y.data, 'i')) fpga_state.add_memlet_path(vecAdd_tasklet, vecMap_exit, z, src_conn='z_con', memlet=dace.Memlet.simple(z.data, 'i')) ###################################### # Interstate edges vecAdd_sdfg.add_edge(copy_in_state, fpga_state, dace.sdfg.sdfg.InterstateEdge()) vecAdd_sdfg.add_edge(fpga_state, copy_out_state, dace.sdfg.sdfg.InterstateEdge()) ######### # Validate vecAdd_sdfg.fill_scope_connectors() vecAdd_sdfg.validate() return vecAdd_sdfg def make_nested_sdfg_fpga(): ''' Build an SDFG with two nested SDFGs, each one a different state ''' n = dace.symbol("n") m = dace.symbol("m") sdfg = dace.SDFG("two_vecAdd") state = sdfg.add_state("state") # build the first axpy: 
works with x,y, and z of n-elements # ATTENTION: this two nested SDFG must have the same name as they are equal to_nest = make_vecAdd_sdfg("vecAdd") sdfg.add_array("x", [n], dace.float32) sdfg.add_array("y", [n], dace.float32) sdfg.add_array("z", [n], dace.float32) x = state.add_read("x") y = state.add_read("y") z = state.add_write("z") # add nested sdfg with symbol mapping nested_sdfg = state.add_nested_sdfg(to_nest, sdfg, {"x", "y"}, {"z"}, {"size": "n"}) state.add_memlet_path(x, nested_sdfg, dst_conn="x", memlet=Memlet.simple(x, "0:n", num_accesses=n)) state.add_memlet_path(y, nested_sdfg, dst_conn="y", memlet=Memlet.simple(y, "0:n", num_accesses=n)) state.add_memlet_path(nested_sdfg, z, src_conn="z", memlet=Memlet.simple(z, "0:n", num_accesses=n)) # Build the second axpy: works with v,w and u of m elements, use another state state2 = sdfg.add_state("state2") to_nest = make_vecAdd_sdfg("vecAdd") sdfg.add_array("v", [m], dace.float32) sdfg.add_array("w", [m], dace.float32) sdfg.add_array("u", [m], dace.float32) v = state2.add_read("v") w = state2.add_read("w") u = state2.add_write("u") nested_sdfg = state2.add_nested_sdfg(to_nest, sdfg, {"x", "y"}, {"z"}, {"size": "m"}) state2.add_memlet_path(v, nested_sdfg, dst_conn="x", memlet=Memlet.simple(v, "0:m", num_accesses=m)) state2.add_memlet_path(w, nested_sdfg, dst_conn="y", memlet=Memlet.simple(w, "0:m", num_accesses=m)) state2.add_memlet_path(nested_sdfg, u, src_conn="z", memlet=Memlet.simple(u, "0:m", num_accesses=m)) ###################################### # Interstate edges sdfg.add_edge(state, state2, dace.sdfg.sdfg.InterstateEdge()) sdfg.validate() return sdfg if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("N", type=int, nargs="?", default=32) parser.add_argument("M", type=int, nargs="?", default=64) args = vars(parser.parse_args()) size_n = args["N"] size_m = args["M"] sdfg = make_nested_sdfg_fpga() two_axpy = sdfg.compile() x = np.random.rand(size_n).astype(np.float32) y = np.random.rand(size_n).astype(np.float32) z = np.random.rand(size_n).astype(np.float32) v = np.random.rand(size_m).astype(np.float32) w = np.random.rand(size_m).astype(np.float32) u = np.random.rand(size_m).astype(np.float32) two_axpy(x=x, y=y, z=z, v=v, w=w, u=u, n=size_n, m=size_m) ref1 = np.add(x, y) ref2 = np.add(v, w) diff1 = np.linalg.norm(ref1 - z) / size_n diff2 = np.linalg.norm(ref2 - u) / size_m if diff1 <= 1e-5 and diff2 <= 1e-5: print("==== Program end ====") else: raise Exception("==== Program Error! ====") # There is no need to check that the Nested SDFG has been generated only once. If this is not the case # the test will fail while compiling
from __future__ import annotations from jsonclasses import jsonclass, types from jsonclasses_pymongo import pymongo @pymongo @jsonclass(class_graph='simple') class SimpleStrId: id: str = types.str.primary.required val: str
# //----------------------------//
# // This file is part of RaiSim//
# // Copyright 2020, RaiSim Tech//
# //----------------------------//

import numpy as np
import platform
import os


class RaisimGymVecEnv:

    def __init__(self, impl, normalize_ob=True, seed=0, clip_obs=10.):
        if platform.system() == "Darwin":
            os.environ['KMP_DUPLICATE_LIB_OK'] = 'True'
        self.normalize_ob = normalize_ob
        self.clip_obs = clip_obs
        self.wrapper = impl
        self.num_obs = self.wrapper.getObDim()
        self.num_acts = self.wrapper.getActionDim()
        self._observation = np.zeros([self.num_envs, self.num_obs], dtype=np.float32)
        self.actions = np.zeros([self.num_envs, self.num_acts], dtype=np.float32)
        self.log_prob = np.zeros(self.num_envs, dtype=np.float32)
        self._reward = np.zeros(self.num_envs, dtype=np.float32)
        self._done = np.zeros(self.num_envs, dtype=bool)
        self.rewards = [[] for _ in range(self.num_envs)]
        self.wrapper.setSeed(seed)
        self.count = 0.0
        self.mean = np.zeros(self.num_obs, dtype=np.float32)
        self.var = np.zeros(self.num_obs, dtype=np.float32)

    def seed(self, seed=None):
        self.wrapper.setSeed(seed)

    def turn_on_visualization(self):
        self.wrapper.turnOnVisualization()

    def turn_off_visualization(self):
        self.wrapper.turnOffVisualization()

    def start_video_recording(self, file_name):
        self.wrapper.startRecordingVideo(file_name)

    def stop_video_recording(self):
        self.wrapper.stopRecordingVideo()

    def step(self, action):
        self.wrapper.step(action, self._reward, self._done)
        return self._reward.copy(), self._done.copy()

    def load_scaling(self, dir_name, iteration, count=1e5):
        mean_file_name = dir_name + "/mean" + str(iteration) + ".csv"
        var_file_name = dir_name + "/var" + str(iteration) + ".csv"
        self.count = count
        self.mean = np.loadtxt(mean_file_name, dtype=np.float32)
        self.var = np.loadtxt(var_file_name, dtype=np.float32)
        self.wrapper.setObStatistics(self.mean, self.var, self.count)

    def save_scaling(self, dir_name, iteration):
        mean_file_name = dir_name + "/mean" + str(iteration) + ".csv"
        var_file_name = dir_name + "/var" + str(iteration) + ".csv"
        self.wrapper.getObStatistics(self.mean, self.var, self.count)
        np.savetxt(mean_file_name, self.mean)
        np.savetxt(var_file_name, self.var)

    def observe(self, update_statistics=True):
        self.wrapper.observe(self._observation, update_statistics)
        return self._observation

    def reset(self):
        self._reward = np.zeros(self.num_envs, dtype=np.float32)
        self.wrapper.reset()

    def close(self):
        self.wrapper.close()

    def curriculum_callback(self):
        self.wrapper.curriculumUpdate()

    @property
    def num_envs(self):
        return self.wrapper.getNumOfEnvs()
from __future__ import unicode_literals HAVE_WEBSOCKET = False WebSocket = None # WebSocket: (URI, header={'Accept': 'nothing', 'X-Magic-Number': '42'})->WebSocket # only send, recv, close are guaranteed to exist HAVE_WS_WEBSOCKET_CLIENT, HAVE_WS_WEBSOCKETS, HAVE_WS_WEBSOCAT, HAVE_WS_NODEJS_WS_WRAPPER, HAVE_WS_NODEJS_WEBSOCKET_WRAPPER = (False, ) * 5 try: from websocket import create_connection, WebSocket def _enter(self): return self def _exit(self, type, value, traceback): self.close() WebSocket.__enter__ = _enter WebSocket.__exit__ = _exit def WebSocketClientWrapper(url, headers={}): return create_connection(url, headers=['%s: %s' % kv for kv in headers.items()]) HAVE_WS_WEBSOCKET_CLIENT = True HAVE_WEBSOCKET = True except (ImportError, ValueError, SyntaxError): WebSocketClientWrapper = None try: from .websockets import WebSocketsWrapper HAVE_WS_WEBSOCKETS = True HAVE_WEBSOCKET = True except (ImportError, ValueError, SyntaxError): WebSocketsWrapper = None try: from .websocat import AVAILABLE if AVAILABLE: from .websocat import WebsocatWrapper HAVE_WS_WEBSOCAT = True HAVE_WEBSOCKET = True else: WebsocatWrapper = None except (ImportError, ValueError, SyntaxError): WebsocatWrapper = None try: from .nodejs import NPM_IS_SANE if NPM_IS_SANE: from .nodejs import HAVE_NODEJS_WEBSOCKET_WRAPPER, HAVE_NODEJS_WS_WRAPPER if HAVE_NODEJS_WEBSOCKET_WRAPPER: from .nodejs import NodeJsWebsocketWrapper HAVE_WS_NODEJS_WEBSOCKET_WRAPPER = True HAVE_WEBSOCKET = True else: NodeJsWebsocketWrapper = None if HAVE_NODEJS_WS_WRAPPER: from .nodejs import NodeJsWsWrapper HAVE_WS_NODEJS_WS_WRAPPER = True HAVE_WEBSOCKET = True else: NodeJsWsWrapper = None else: NodeJsWebsocketWrapper, NodeJsWsWrapper = None, None except (ImportError, ValueError, SyntaxError): NodeJsWebsocketWrapper, NodeJsWsWrapper = None, None WebSocket = WebSocketClientWrapper or WebSocketsWrapper or WebsocatWrapper or NodeJsWebsocketWrapper or NodeJsWsWrapper
import TDMtermite # import numpy as np import json import re # create 'tdm_termite' instance object try : jack = TDMtermite.tdmtermite(b'samples/SineData.tdm',b'samples/SineData.tdx') except RuntimeError as e : print("failed to load/decode TDM files: " + str(e)) # list ids of channelgroups grpids = jack.get_channelgroup_ids() grpids = [x.decode() for x in grpids] print("list of channelgroups: ",grpids) for grp in grpids[0:2] : # obtain meta data of channelgroups grpinfo = jack.get_channelgroup_info(grp.encode()) print( json.dumps(grpinfo,sort_keys=False,indent=4) ) # write channelgroup to file try : grpname = re.sub('[^A-Za-z0-9]','',grpinfo['name']) grpfile = "./channelgroup_" + str(grp) + "_" + str(grpname) + ".csv" jack.print_channelgroup(grp.encode(),grpfile.encode(),True,ord(' '),b'') except RuntimeError as e : print("failed to print channelgroup: " + str(grp) + " : " + str(e)) # list ids of channels chnids = jack.get_channel_ids() chnids = [x.decode() for x in chnids] print("list of channels: ",chnids) for chn in chnids[0:2] : # obtain meta-data chninfo = jack.get_channel_info(chn.encode()) print( json.dumps(chninfo,sort_keys=False,indent=4) ) # channel data try : chndata = jack.get_channel(chn.encode()) except RuntimeError as e : print("failed to extract channel: " + str(chn) + " : " + str(e)) print(str(chndata[0:6]) + " ...") # write channel to file chnfile = "./channel_" + str(chn) + ".csv" try : jack.print_channel(chn.encode(),chnfile.encode(),True) except RuntimeError as e : print("failed to print channel: " + str(chn) + " : " + str(e))
import torch import torch.nn.functional as F class GlobalContext(torch.nn.Module): def __init__(self, num_classes=72): super(GlobalContext, self).__init__() self.num_classes = num_classes self.fc = torch.nn.Linear( in_features=self.num_classes * 74, out_features=128, bias=True) self.conv = torch.nn.Conv2d( in_channels=self.num_classes + 128, out_channels=self.num_classes, kernel_size=1, padding=0, bias=False) self.bn = torch.nn.BatchNorm2d( num_features=self.num_classes, momentum=0.9) def forward(self, x): in_size = x.size() # Nx72x1x74 y = torch.flatten(x, start_dim=1) # Nx4884 y = F.relu(self.fc(y)) # Nx128 y = y.view(in_size[0], y.size(1), 1, 1) # Nx128x1x1 y = y.repeat(1, 1, 1, in_size[3]) # Nx128x1x74 x = torch.cat((x, y), dim=1) # Nx200x1x74 x = F.relu(self.bn(self.conv(x))) # Nx72x1x74 z = torch.squeeze(x, dim=2) # Nx72x74:NCT return z
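# Illustrative shape check for GlobalContext: the comments above imply an
# Nx72x1x74 input and an Nx72x74 (NCT) output; this sketch feeds a random
# batch of 2 through the module to confirm those shapes.
if __name__ == '__main__':
    net = GlobalContext(num_classes=72)
    dummy = torch.randn(2, 72, 1, 74)  # Nx72x1x74
    print(net(dummy).shape)            # expected: torch.Size([2, 72, 74])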
import os from elasticsearch import Elasticsearch import sqlalchemy as database from sqlalchemy.orm import sessionmaker from zeeguu_core.elastic.elastic_query_builder import build_elastic_query from comparison.mysqlFullText import mysql_fulltext_query from timeit import default_timer as timer from zeeguu_core.model import Language from compare_settings import * import csv es = Elasticsearch([ES_CONN_STRING]) def query_performance(mysql, index, size, content, topics, unwanted_topics, user_topics, unwanted_user_topics): language = Language("en", "English") language.id = 5 # build elasticsearch query query_body = build_elastic_query(size, content, topics, unwanted_topics, user_topics, unwanted_user_topics, language, 100, 0) # build Mysql query mysql_query = mysql_fulltext_query(mysql, size, content, topics, unwanted_topics, user_topics, unwanted_user_topics, language, 100, 0) # Elasticsearch res = es.search(index=index, body=query_body) for result in res['hits']['hits']: elastic_title = result['_source']['title'] elastic_content = result['_source']['content'] published_time = result['_source']['published_time'] write_results_to_csv("elastic", elastic_title, elastic_content, published_time) # Mysql result = mysql_query.all() for mysql_result in result: mysql_title = mysql_result.title mysql_content = mysql_result.content published_time = mysql_result.published_time write_results_to_csv("mysql_full_text", mysql_title, mysql_content, published_time) def average_time(lst): return (sum(lst) / len(lst))*1000 def average(lst): return sum(lst) / len(lst) def write_results_to_csv(name, title, content, published_time): file_exists = os.path.isfile(title_of_csv) with open(title_of_csv, 'a', newline='', encoding='utf-8') as csvfile: fieldnames = ['Technology', "Title", 'Content', 'Published Time'] writer = csv.DictWriter(csvfile, fieldnames=fieldnames) if not file_exists: writer.writeheader() writer.writerow({'Technology': name, 'Title': title, 'Content': content, 'Published Time': published_time}) def run(sessions, requested_articles): for session in sessions: for nb_articles in requested_articles: query_performance(session[0], session[2], session[1], nb_articles, content, wanted_topics, unwanted_topics, wanted_user_topics, unwanted_user_topics) print('Done') if __name__ == '__main__': content = "soccer" wanted_topics = '' unwanted_topics = '' wanted_user_topics = '' unwanted_user_topics = 'the great depression' engine1000k = database.create_engine(DB1000K) Session1000k = sessionmaker(bind=engine1000k) session1000k = Session1000k() title_of_csv = 'Relevance_test.csv' session_lst = [(session1000k, '1000k', ES_INDEX)] requested_articles_lst = [10] run(session_lst, requested_articles_lst)
from logging import error import unittest from requests.api import request from app import app import technical class Test(unittest.TestCase): #UNIT TEST FOR app.py URL = "http://127.0.0.1:5000/test_task/api/distance_address" data_valid = {"address": "Moscow"} key_invalid = {"adres": "Moscow"} invalid_address_1 ={"address": "@5-!&*a"} invalid_address_2 ={"address": "-1@1 jgstuo2"} outside_mkad = {"address": "Jakarta, Indonesia"} error_1 = b'{"message":"You have to send data in json format"}\n' error_2 = b'{"message":"make sure you have key address in your JSON data"}\n' error_3 = b'{"message":"Invalid address!"}\n' error_4 = b'{"message":"Can not find your address!"}\n' error_5 = b'{"message":"Server do not have access to yandex API"}\n' inside_mkad = b'{"message":"area inside MKAD"}\n' # Test for index function # Test to check the index function if it run properly or not def test_index(self): test = app.test_client(self) response = test.get('/', content_type = 'html/text') self.assertEqual(response.status_code, 200) # Test for distance_address function # Test to check if address is inside Moscow ring road def test_inside_mkad(self): tester = app.test_client(self) response = tester.post(self.URL, json = self.data_valid, content_type = 'application/json') self.assertEqual(self.inside_mkad,response.data) self.assertEqual(response.status_code, 200) # Test to check if address is outside Moscow ring road def test_outside_mkad(self): tester = app.test_client(self) response = tester.post(self.URL, json = self.outside_mkad, content_type = 'application/json') self.assertNotEqual(response.data, self.inside_mkad) self.assertEqual(response.status_code, 200) # Test to check if client not post a json file type # In this case i try to use xml def test_content_type_not_json(self): test = app.test_client(self) response = test.post(self.URL, data = self.data_valid, content_type='application/xml') self.assertEqual(response.data, self.error_1) self.assertEqual(response.status_code, 415) # Test to check if key not is json file # valid key is 'address' but in this test used 'adres' def test_content_key_invalid(self): test = app.test_client(self) response = test.post(self.URL, json = self.key_invalid, content_type = 'application/json') self.assertEqual(response.data, self.error_2) self.assertEqual(response.status_code, 400) # Invalid address type 1 # This test check if client only send one letter in address or # maybe a number with single value like only "5" def test_invalid_addres_1(self): tester = app.test_client(self) response = tester.post(self.URL, json = self.invalid_address_1, content_type = 'application/json') self.assertEqual(response.data, self.error_3) self.assertEqual(response.status_code, 422) # Invalid address type 2 # If address have passed the invalid type 1 but yandex can not find # latitude and longitude from specific address def test_invalid_addres_2(self): tester = app.test_client(self) response = tester.post(self.URL, json = self.invalid_address_2, content_type = 'application/json') self.assertEqual(response.data, self.error_4) self.assertEqual(response.status_code, 404) # Test if our server have access to Yandex API # Error occurs when our server do not have valid API Key def test_access(self): tester = app.test_client(self) response = tester.post(self.URL, json = self.data_valid, content_type = 'application/json') self.assertNotEqual(response.data, self.error_5) self.assertNotEqual(response.status_code, 500) #UNIT TEST FOR technical.py # Test if address inside mkad [test class 
CheckDistance]
    def test_count_distance_1(self):
        lat = 55.753220    # lat for Moscow, Russia
        long = 37.622513   # long for Moscow, Russia
        obj_check_distance = technical.CheckDistance(lat, long)
        count_distance = obj_check_distance.count_distance()
        self.assertEqual('area inside MKAD', count_distance)

    # Test if address outside mkad [test class CheckDistance]
    # And test that count_distance returns a value of float type
    def test_count_distance_2(self):
        lat = -6.175391    # lat for Jakarta, Indonesia
        long = 106.826261  # long for Jakarta, Indonesia
        obj_check_distance = technical.CheckDistance(lat, long)
        count_distance = obj_check_distance.count_distance()
        self.assertNotEqual('area inside MKAD', count_distance)
        self.assertIs(type(count_distance), float)

    # Test if lat and long are not of float type [test class CheckDistance]
    def test_count_distance_3(self):
        lat_1 = "-6.175391"
        long_1 = 106.826261
        lat_2 = "Moscow, Russia"
        long_2 = "Jakarta, Indonesia"
        obj_check_distance_1 = technical.CheckDistance(lat_1, long_1)
        count_distance_1 = obj_check_distance_1.count_distance()
        self.assertEqual("latitude and longitude must be in float type", count_distance_1)
        obj_check_distance_2 = technical.CheckDistance(lat_2, long_2)
        count_distance_2 = obj_check_distance_2.count_distance()
        self.assertEqual("latitude and longitude must be in float type", count_distance_2)

    # Test haversine function when lat and long are of float type,
    # and check that the result is a float, not a string [test class CheckDistance]
    def test_haversine_1(self):
        lat_1 = 55.898947   # lat for MKAD, 88th kilometre, inner side
        long_1 = 37.632206  # long for MKAD, 88th kilometre, inner side
        lat_2 = 38.231572
        long_2 = 25.192846
        obj_check_distance = technical.CheckDistance(lat_2, long_2)
        haversine = obj_check_distance.haversine(lat_1, long_1, lat_2, long_2)
        self.assertIsNot(type(haversine), str)
        self.assertIs(type(haversine), float)

    # Test haversine function when lat and long are integers
    def test_haversine_2(self):
        lat_1 = int(55)
        long_1 = int(37)
        lat_2 = int(38)
        long_2 = int(-25)
        obj_check_distance = technical.CheckDistance(lat_2, long_2)
        haversine = obj_check_distance.haversine(lat_1, long_1, lat_2, long_2)
        self.assertIsNot(type(haversine), str)

    # Test if lat or long is of string type
    def test_haversine_3(self):
        lat_1 = str(55)  # lat for MKAD, 88th kilometre, inner side
        long_1 = 37      # long for MKAD, 88th kilometre, inner side
        lat_2 = 38.0098
        long_2 = "15"
        obj_check_distance = technical.CheckDistance(lat_2, long_2)
        haversine = obj_check_distance.haversine(lat_1, long_1, lat_2, long_2)
        self.assertEqual("latitude and longitude can not be string", haversine)

    # Test the check_address function to check whether an address is valid
    # Valid if (address is letters, address length >= 2)
    # Valid if (address is a number, address length >= 2)
    def test_check_address_valid_1(self):
        address = "Moscow, Russia"
        obj_check_address = technical.TextPreprocessing(address)
        check_address = obj_check_address.check_address()
        self.assertEqual("valid", check_address)

    # Test if lat and long values are given inside the address string
    def test_check_address_valid_2(self):
        address = "55.2333, 25.444221"
        obj_check_address = technical.TextPreprocessing(address)
        check_address = obj_check_address.check_address()
        self.assertEqual("valid", check_address)

    # Test if address is not of string type
    def test_check_address_valid_3(self):
        address = 55.233325
        obj_check_address = technical.TextPreprocessing(address)
        check_address = obj_check_address.check_address()
        self.assertEqual("address must be in string type", check_address)


if __name__ == "__main__":
    unittest.main()
#!env/bin/python # Copyright (C) 2017 Baofeng Dong # This program is released under the "MIT License". # Please see the file COPYING in the source # distribution of this software for license terms. from dashboard import app app.run(debug = True)
# Given a range [m, n] where 0 <= m <= n <= 2147483647, return the bitwise AND of all numbers in this range, inclusive.

# Example 1:
# Input: [5,7]
# Output: 4

# Example 2:
# Input: [0,1]
# Output: 0


class Solution(object):
    def rangeBitwiseAnd(self, m, n):
        """
        :type m: int
        :type n: int
        :rtype: int
        """
        # Pattern spotting / bit manipulation:
        # [5, 7] contains three numbers; written in binary they are:
        # 101  110  111
        # ANDing them gives 100. Looking closely, the result is the common
        # high-order (left-hand) prefix shared by every number in the range.
        # So the code only needs to find that common prefix: shift m and n
        # right one bit at a time until they are equal, accumulating the
        # number of shifts in i as a power of two, then shift m back left
        # (multiply by i) to get the final result.
        i = 1
        while m != n:
            m >>= 1
            n >>= 1
            i <<= 1
        return m * i
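# Illustrative checks of the common-prefix idea above: 5..7 are 101, 110,
# 111 and share only the leading "1", so the AND is 100 = 4; 26..30 are
# 11010..11110 and share the prefix "11", so the AND is 11000 = 24.
assert Solution().rangeBitwiseAnd(5, 7) == 4
assert Solution().rangeBitwiseAnd(0, 1) == 0
assert Solution().rangeBitwiseAnd(26, 30) == 24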
# Generated by Django 3.1.1 on 2020-09-20 22:45 from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('character', '0001_initial'), ] operations = [ migrations.AlterField( model_name='ancestry', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_ancestry_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='background', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_background_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='character', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_character_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='characterclass', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_characterclass_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='classandlevel', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_classandlevel_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='feature', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_feature_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='featuresatlevel', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_featuresatlevel_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='inventoryadventuringgear', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_inventoryadventuringgear_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='inventoryarmor', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_inventoryarmor_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='inventorytool', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_inventorytool_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='inventoryweapon', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_inventoryweapon_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='inventorywondrousitem', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_inventorywondrousitem_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='skillproficiency', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_skillproficiency_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='spellslotsatlevel', name='authorized_editors', field=models.ManyToManyField(blank=True, related_name='_spellslotsatlevel_authorized_editors_+', to=settings.AUTH_USER_MODEL), ), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from __future__ import absolute_import from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('sms', '0009_check_for_domain_default_backend_migration'), ] operations = [ migrations.AlterField( model_name='sqlmobilebackend', name='couch_id', field=models.CharField(unique=True, max_length=126, db_index=True), preserve_default=True, ), ]
#!/usr/bin/env python3
"""Translate chromosomes in Ensembl GFF3, ignore chromosomes missing from lookup."""

# Keys are strings so they match the text fields read from the file.
lookup_table = {str(x): f'chr{x}' for x in list(range(1, 23)) + ['X', 'Y']}
lookup_table['MT'] = 'chrM'

with open('/dev/stdin', 'r') as gff:
    for row in gff:
        row = row.rstrip('\n')
        if not row:
            continue
        if row.startswith('##sequence-region'):
            # pragma lines are space-separated
            fields = row.split()
            if fields[1] in lookup_table:
                fields[1] = lookup_table[fields[1]]
            print(' '.join(fields))
        elif row.startswith('#'):
            print(row)
        else:
            # data lines are tab-separated; attribute values may contain spaces
            fields = row.split('\t')
            if fields[0] in lookup_table:
                fields[0] = lookup_table[fields[0]]
            print('\t'.join(fields))
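# Illustrative behaviour, assuming the script above is saved as the
# hypothetical translate_chroms.py: an Ensembl-style contig "1" is rewritten
# to "chr1", while contigs absent from the lookup (e.g. scaffolds) pass
# through unchanged.
#
#   $ printf '1\thavana\tgene\t11869\t14409\t.\t+\t.\tID=gene:g1\n' \
#       | python3 translate_chroms.py
#   chr1  havana  gene  11869  14409  .  +  .  ID=gene:g1   (tab-separated)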
import matplotlib.pyplot as plt def plot(all_losses): plt.figure() plt.plot(all_losses) plt.show()
from yowsup.structs import ProtocolEntity, ProtocolTreeNode class FailureProtocolEntity(ProtocolEntity): def __init__(self, reason): super(FailureProtocolEntity, self).__init__("failure") self.reason = reason def __str__(self): out = "Failure:\n" out += "Reason: %s\n" % self.reason return out def getReason(self): return self.reason def toProtocolTreeNode(self): reasonNode = ProtocolTreeNode(self.reason, {}) return self._createProtocolTreeNode({}, children = [reasonNode]) @staticmethod def fromProtocolTreeNode(node): return FailureProtocolEntity( node.getAllChildren()[0].tag )
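# A small round-trip sketch (illustrative; the reason string "not-authorized"
# is made up). It uses only methods defined above or inherited from
# ProtocolEntity: serialize the entity to a tree node, then parse it back.
if __name__ == "__main__":
    entity = FailureProtocolEntity("not-authorized")
    node = entity.toProtocolTreeNode()
    print(FailureProtocolEntity.fromProtocolTreeNode(node))
    # prints:
    # Failure:
    # Reason: not-authorized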