rimossi cloudscraper, simplejson e torrentool, aggiornato sambatools
This commit is contained in:
@@ -7,12 +7,9 @@ import requests, json, copy, inspect
|
||||
from core import support
|
||||
from platformcode import autorenumber
|
||||
|
||||
try: from lib import cloudscraper
|
||||
except: from lib import cloudscraper
|
||||
|
||||
host = support.config.get_channel_url()
|
||||
response = cloudscraper.create_scraper().get(host + '/archivio')
|
||||
csrf_token = support.match(response.text, patron= 'name="csrf-token" content="([^"]+)"').match
|
||||
response = support.httptools.downloadpage(host + '/archivio')
|
||||
csrf_token = support.match(response.data, patron='name="csrf-token" content="([^"]+)"').match
|
||||
headers = {'content-type': 'application/json;charset=UTF-8',
|
||||
'x-csrf-token': csrf_token,
|
||||
'Cookie' : '; '.join([x.name + '=' + x.value for x in response.cookies])}
|
||||
|
||||
@@ -37,7 +37,6 @@ if not xbmc_vfs:
|
||||
except:
|
||||
samba = None
|
||||
# Python 2.4 Not compatible with samba module, you have to check
|
||||
|
||||
# Windows is "mbcs" linux, osx, android is "utf8"
|
||||
if os.name == "nt":
|
||||
fs_encoding = ""
|
||||
|
||||
@@ -8,27 +8,7 @@ import traceback
|
||||
from platformcode import logger
|
||||
from inspect import stack
|
||||
|
||||
try:
|
||||
import json
|
||||
except:
|
||||
logger.error("json included in the interpreter **NOT** available")
|
||||
|
||||
try:
|
||||
import simplejson as json
|
||||
except:
|
||||
logger.error("simplejson included in the interpreter **NOT** available")
|
||||
try:
|
||||
from lib import simplejson as json
|
||||
except:
|
||||
logger.error("simplejson in lib directory **NOT** available")
|
||||
logger.error("A valid JSON parser was not found")
|
||||
json = None
|
||||
else:
|
||||
logger.info("Using simplejson in the lib directory")
|
||||
else:
|
||||
logger.error("Using simplejson included in the interpreter")
|
||||
# ~ else:
|
||||
# ~ logger.info("Usando json incluido en el interprete")
|
||||
import json
|
||||
|
||||
import sys
|
||||
PY3 = False
|
||||
|
||||
@@ -1,127 +0,0 @@
|
||||
#-*- coding: utf-8 -*-
|
||||
#
|
||||
####
|
||||
# 2006/02 Will Holcomb <wholcomb@gmail.com>
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# 2007/07/26 Slightly modified by Brian Schneider
|
||||
#
|
||||
# in order to support unicode files ( multipart_encode function )
|
||||
# From http://peerit.blogspot.com/2007/07/multipartposthandler-doesnt-work-for.html
|
||||
#
|
||||
# 2013/07 Ken Olum <kdo@cosmos.phy.tufts.edu>
|
||||
#
|
||||
# Removed one of \r\n and send Content-Length
|
||||
#
|
||||
# 2014/05 Applied Fedora rpm patch
|
||||
#
|
||||
# https://bugzilla.redhat.com/show_bug.cgi?id=920778
|
||||
# http://pkgs.fedoraproject.org/cgit/python-MultipartPostHandler2.git/diff/python-MultipartPostHandler2-cut-out-main.patch?id=c1638bb3e45596232b4d02f1e69901db0c28cfdb
|
||||
#
|
||||
# 2014/05/09 Sérgio Basto <sergio@serjux.com>
|
||||
#
|
||||
# Better deal with None values, don't throw an exception and just send an empty string.
|
||||
# Simplified text example
|
||||
#
|
||||
"""
|
||||
Usage:
|
||||
Enables the use of multipart/form-data for posting forms
|
||||
|
||||
Inspirations:
|
||||
Upload files in python:
|
||||
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
|
||||
urllib2_file:
|
||||
Fabien Seisen: <fabien@seisen.org>
|
||||
|
||||
Example:
|
||||
import MultipartPostHandler, urllib2
|
||||
|
||||
opener = urllib2.build_opener(MultipartPostHandler.MultipartPostHandler)
|
||||
params = { "username" : "bob", "password" : "riviera",
|
||||
"file" : open("filename", "rb") }
|
||||
opener.open("http://wwww.bobsite.com/upload/", params)
|
||||
"""
|
||||
|
||||
import urllib
|
||||
import urllib2
|
||||
import mimetools, mimetypes
|
||||
import os, stat
|
||||
from cStringIO import StringIO
|
||||
|
||||
class Callable:
|
||||
def __init__(self, anycallable):
|
||||
self.__call__ = anycallable
|
||||
|
||||
# Controls how sequences are uncoded. If true, elements may be given multiple values by
|
||||
# assigning a sequence.
|
||||
doseq = 1
|
||||
|
||||
class MultipartPostHandler(urllib2.BaseHandler):
|
||||
handler_order = urllib2.HTTPHandler.handler_order - 10 # needs to run first
|
||||
|
||||
def http_request(self, request):
|
||||
data = request.get_data()
|
||||
if data is not None and type(data) != str:
|
||||
v_files = []
|
||||
v_vars = []
|
||||
try:
|
||||
for(key, value) in data.items():
|
||||
if type(value) == file:
|
||||
v_files.append((key, value))
|
||||
else:
|
||||
v_vars.append((key, value))
|
||||
except TypeError:
|
||||
systype, value, traceback = sys.exc_info()
|
||||
raise TypeError, "not a valid non-string sequence or mapping object", traceback
|
||||
|
||||
if len(v_files) == 0:
|
||||
data = urllib.urlencode(v_vars, doseq)
|
||||
else:
|
||||
boundary, data = self.multipart_encode(v_vars, v_files)
|
||||
contenttype = 'multipart/form-data; boundary=%s' % boundary
|
||||
# ~ if(request.has_header('Content-Type')
|
||||
# ~ and request.get_header('Content-Type').find('multipart/form-data') != 0):
|
||||
# ~ print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data')
|
||||
request.add_unredirected_header('Content-Type', contenttype)
|
||||
|
||||
request.add_data(data)
|
||||
return request
|
||||
|
||||
def multipart_encode(vars, files, boundary = None, buffer = None):
|
||||
if boundary is None:
|
||||
boundary = mimetools.choose_boundary()
|
||||
if buffer is None:
|
||||
buffer = StringIO()
|
||||
for(key, value) in vars:
|
||||
buffer.write('--%s\r\n' % boundary)
|
||||
buffer.write('Content-Disposition: form-data; name="%s"' % key)
|
||||
if value is None:
|
||||
value = ""
|
||||
# if type(value) is not str, we need str(value) to not error with cannot concatenate 'str'
|
||||
# and 'dict' or 'tuple' or somethingelse objects
|
||||
buffer.write('\r\n\r\n' + str(value) + '\r\n')
|
||||
for(key, fd) in files:
|
||||
file_size = os.fstat(fd.fileno())[stat.ST_SIZE]
|
||||
filename = fd.name.split('/')[-1]
|
||||
contenttype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
|
||||
buffer.write('--%s\r\n' % boundary)
|
||||
buffer.write('Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, filename))
|
||||
buffer.write('Content-Type: %s\r\n' % contenttype)
|
||||
buffer.write('Content-Length: %s\r\n' % file_size)
|
||||
fd.seek(0)
|
||||
buffer.write('\r\n' + fd.read() + '\r\n')
|
||||
buffer.write('--' + boundary + '--\r\n')
|
||||
buffer = buffer.getvalue()
|
||||
return boundary, buffer
|
||||
multipart_encode = Callable(multipart_encode)
|
||||
|
||||
https_request = http_request
|
||||
@@ -1,836 +0,0 @@
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
import logging
|
||||
import re
|
||||
import requests
|
||||
import sys
|
||||
import ssl
|
||||
|
||||
from collections import OrderedDict
|
||||
from copy import deepcopy
|
||||
|
||||
from requests.adapters import HTTPAdapter
|
||||
from requests.sessions import Session
|
||||
from requests_toolbelt.utils import dump
|
||||
|
||||
from time import sleep
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
try:
|
||||
import brotli
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
import copyreg
|
||||
except ImportError:
|
||||
import copy_reg as copyreg
|
||||
|
||||
try:
|
||||
from HTMLParser import HTMLParser
|
||||
except ImportError:
|
||||
if sys.version_info >= (3, 4):
|
||||
import html
|
||||
else:
|
||||
from html.parser import HTMLParser
|
||||
|
||||
try:
|
||||
from urlparse import urlparse, urljoin
|
||||
except ImportError:
|
||||
from urllib.parse import urlparse, urljoin
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
from .exceptions import (
|
||||
CloudflareLoopProtection,
|
||||
CloudflareCode1020,
|
||||
CloudflareIUAMError,
|
||||
CloudflareSolveError,
|
||||
CloudflareChallengeError,
|
||||
CloudflareCaptchaError,
|
||||
CloudflareCaptchaProvider
|
||||
)
|
||||
|
||||
from .interpreters import JavaScriptInterpreter
|
||||
from .captcha import Captcha
|
||||
from .user_agent import User_Agent
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
__version__ = '1.2.46'
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class CipherSuiteAdapter(HTTPAdapter):
|
||||
|
||||
__attrs__ = [
|
||||
'ssl_context',
|
||||
'max_retries',
|
||||
'config',
|
||||
'_pool_connections',
|
||||
'_pool_maxsize',
|
||||
'_pool_block',
|
||||
'source_address'
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.ssl_context = kwargs.pop('ssl_context', None)
|
||||
self.cipherSuite = kwargs.pop('cipherSuite', None)
|
||||
self.source_address = kwargs.pop('source_address', None)
|
||||
|
||||
if self.source_address:
|
||||
if isinstance(self.source_address, str):
|
||||
self.source_address = (self.source_address, 0)
|
||||
|
||||
if not isinstance(self.source_address, tuple):
|
||||
raise TypeError(
|
||||
"source_address must be IP address string or (ip, port) tuple"
|
||||
)
|
||||
|
||||
if not self.ssl_context:
|
||||
self.ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
|
||||
self.ssl_context.set_ciphers(self.cipherSuite)
|
||||
self.ssl_context.set_ecdh_curve('prime256v1')
|
||||
self.ssl_context.options |= (ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1)
|
||||
|
||||
super(CipherSuiteAdapter, self).__init__(**kwargs)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def init_poolmanager(self, *args, **kwargs):
|
||||
kwargs['ssl_context'] = self.ssl_context
|
||||
kwargs['source_address'] = self.source_address
|
||||
return super(CipherSuiteAdapter, self).init_poolmanager(*args, **kwargs)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def proxy_manager_for(self, *args, **kwargs):
|
||||
kwargs['ssl_context'] = self.ssl_context
|
||||
kwargs['source_address'] = self.source_address
|
||||
return super(CipherSuiteAdapter, self).proxy_manager_for(*args, **kwargs)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class CloudScraper(Session):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.debug = kwargs.pop('debug', False)
|
||||
self.delay = kwargs.pop('delay', None)
|
||||
self.cipherSuite = kwargs.pop('cipherSuite', None)
|
||||
self.ssl_context = kwargs.pop('ssl_context', None)
|
||||
self.interpreter = kwargs.pop('interpreter', 'native')
|
||||
self.captcha = kwargs.pop('captcha', {})
|
||||
self.requestPreHook = kwargs.pop('requestPreHook', None)
|
||||
self.requestPostHook = kwargs.pop('requestPostHook', None)
|
||||
self.source_address = kwargs.pop('source_address', None)
|
||||
self.doubleDown = kwargs.pop('doubleDown', True)
|
||||
|
||||
self.allow_brotli = kwargs.pop(
|
||||
'allow_brotli',
|
||||
True if 'brotli' in sys.modules.keys() else False
|
||||
)
|
||||
|
||||
self.user_agent = User_Agent(
|
||||
allow_brotli=self.allow_brotli,
|
||||
browser=kwargs.pop('browser', None)
|
||||
)
|
||||
|
||||
self._solveDepthCnt = 0
|
||||
self.solveDepth = kwargs.pop('solveDepth', 3)
|
||||
|
||||
super(CloudScraper, self).__init__(*args, **kwargs)
|
||||
|
||||
# pylint: disable=E0203
|
||||
if 'requests' in self.headers['User-Agent']:
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Set a random User-Agent if no custom User-Agent has been set
|
||||
# ------------------------------------------------------------------------------- #
|
||||
self.headers = self.user_agent.headers
|
||||
if not self.cipherSuite:
|
||||
self.cipherSuite = self.user_agent.cipherSuite
|
||||
|
||||
if isinstance(self.cipherSuite, list):
|
||||
self.cipherSuite = ':'.join(self.cipherSuite)
|
||||
|
||||
self.mount(
|
||||
'https://',
|
||||
CipherSuiteAdapter(
|
||||
cipherSuite=self.cipherSuite,
|
||||
ssl_context=self.ssl_context,
|
||||
source_address=self.source_address
|
||||
)
|
||||
)
|
||||
|
||||
# purely to allow us to pickle dump
|
||||
copyreg.pickle(ssl.SSLContext, lambda obj: (obj.__class__, (obj.protocol,)))
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Allow us to pickle our session back with all variables
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def __getstate__(self):
|
||||
return self.__dict__
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Allow replacing actual web request call via subclassing
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def perform_request(self, method, url, *args, **kwargs):
|
||||
return super(CloudScraper, self).request(method, url, *args, **kwargs)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Raise an Exception with no stacktrace and reset depth counter.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def simpleException(self, exception, msg):
|
||||
self._solveDepthCnt = 0
|
||||
sys.tracebacklimit = 0
|
||||
raise exception(msg)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# debug the request via the response
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
@staticmethod
|
||||
def debugRequest(req):
|
||||
try:
|
||||
print(dump.dump_all(req).decode('utf-8'))
|
||||
except ValueError as e:
|
||||
print("Debug Error: {}".format(getattr(e, 'message', e)))
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Unescape / decode html entities
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
@staticmethod
|
||||
def unescape(html_text):
|
||||
if sys.version_info >= (3, 0):
|
||||
if sys.version_info >= (3, 4):
|
||||
return html.unescape(html_text)
|
||||
|
||||
return HTMLParser().unescape(html_text)
|
||||
|
||||
return HTMLParser().unescape(html_text)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Decode Brotli on older versions of urllib3 manually
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def decodeBrotli(self, resp):
|
||||
if requests.packages.urllib3.__version__ < '1.25.1' and resp.headers.get('Content-Encoding') == 'br':
|
||||
if self.allow_brotli and resp._content:
|
||||
resp._content = brotli.decompress(resp.content)
|
||||
else:
|
||||
logging.warning(
|
||||
'You\'re running urllib3 {}, Brotli content detected, '
|
||||
'Which requires manual decompression, '
|
||||
'But option allow_brotli is set to False, '
|
||||
'We will not continue to decompress.'.format(requests.packages.urllib3.__version__)
|
||||
)
|
||||
|
||||
return resp
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Our hijacker request function
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def request(self, method, url, *args, **kwargs):
|
||||
# pylint: disable=E0203
|
||||
if kwargs.get('proxies') and kwargs.get('proxies') != self.proxies:
|
||||
self.proxies = kwargs.get('proxies')
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Pre-Hook the request via user defined function.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
if self.requestPreHook:
|
||||
(method, url, args, kwargs) = self.requestPreHook(
|
||||
self,
|
||||
method,
|
||||
url,
|
||||
*args,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Make the request via requests.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
response = self.decodeBrotli(
|
||||
self.perform_request(method, url, *args, **kwargs)
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Debug the request via the Response object.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
if self.debug:
|
||||
self.debugRequest(response)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Post-Hook the request aka Post-Hook the response via user defined function.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
if self.requestPostHook:
|
||||
response = self.requestPostHook(self, response)
|
||||
|
||||
if self.debug:
|
||||
self.debugRequest(response)
|
||||
|
||||
# Check if Cloudflare anti-bot is on
|
||||
if self.is_Challenge_Request(response):
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Try to solve the challenge and send it back
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
if self._solveDepthCnt >= self.solveDepth:
|
||||
_ = self._solveDepthCnt
|
||||
self.simpleException(
|
||||
CloudflareLoopProtection,
|
||||
"!!Loop Protection!! We have tried to solve {} time(s) in a row.".format(_)
|
||||
)
|
||||
|
||||
self._solveDepthCnt += 1
|
||||
|
||||
response = self.Challenge_Response(response, **kwargs)
|
||||
else:
|
||||
if not response.is_redirect and response.status_code not in [429, 503]:
|
||||
self._solveDepthCnt = 0
|
||||
|
||||
return response
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# check if the response contains a valid Cloudflare challenge
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
@staticmethod
|
||||
def is_IUAM_Challenge(resp):
|
||||
try:
|
||||
return (
|
||||
resp.headers.get('Server', '').startswith('cloudflare')
|
||||
and resp.status_code in [429, 503]
|
||||
and re.search(
|
||||
r'<form .*?="challenge-form" action="/.*?__cf_chl_jschl_tk__=\S+"',
|
||||
resp.text,
|
||||
re.M | re.S
|
||||
)
|
||||
)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
return False
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# check if the response contains new Cloudflare challenge
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
@staticmethod
|
||||
def is_New_IUAM_Challenge(resp):
|
||||
try:
|
||||
return (
|
||||
resp.headers.get('Server', '').startswith('cloudflare')
|
||||
and resp.status_code in [429, 503]
|
||||
and re.search(
|
||||
r'cpo.src\s*=\s*"/cdn-cgi/challenge-platform/orchestrate/jsch/v1"',
|
||||
resp.text,
|
||||
re.M | re.S
|
||||
)
|
||||
and re.search(r'window._cf_chl_enter\(', resp.text, re.M | re.S)
|
||||
)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
return False
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# check if the response contains a v2 hCaptcha Cloudflare challenge
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
@staticmethod
|
||||
def is_New_Captcha_Challenge(resp):
|
||||
try:
|
||||
return (
|
||||
CloudScraper.is_Captcha_Challenge(resp)
|
||||
and re.search(
|
||||
r'cpo.src\s*=\s*"/cdn-cgi/challenge-platform/orchestrate/captcha/v1"',
|
||||
resp.text,
|
||||
re.M | re.S
|
||||
)
|
||||
and re.search(r'window._cf_chl_enter\(', resp.text, re.M | re.S)
|
||||
)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
return False
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# check if the response contains a Cloudflare hCaptcha challenge
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
@staticmethod
|
||||
def is_Captcha_Challenge(resp):
|
||||
try:
|
||||
return (
|
||||
resp.headers.get('Server', '').startswith('cloudflare')
|
||||
and resp.status_code == 403
|
||||
and re.search(
|
||||
r'action="/\S+__cf_chl_captcha_tk__=\S+',
|
||||
resp.text,
|
||||
re.M | re.DOTALL
|
||||
)
|
||||
)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
return False
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# check if the response contains Firewall 1020 Error
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
@staticmethod
|
||||
def is_Firewall_Blocked(resp):
|
||||
try:
|
||||
return (
|
||||
resp.headers.get('Server', '').startswith('cloudflare')
|
||||
and resp.status_code == 403
|
||||
and re.search(
|
||||
r'<span class="cf-error-code">1020</span>',
|
||||
resp.text,
|
||||
re.M | re.DOTALL
|
||||
)
|
||||
)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
return False
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Wrapper for is_Captcha_Challenge, is_IUAM_Challenge, is_Firewall_Blocked
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def is_Challenge_Request(self, resp):
|
||||
if self.is_Firewall_Blocked(resp):
|
||||
self.simpleException(
|
||||
CloudflareCode1020,
|
||||
'Cloudflare has blocked this request (Code 1020 Detected).'
|
||||
)
|
||||
|
||||
if self.is_New_Captcha_Challenge(resp):
|
||||
self.simpleException(
|
||||
CloudflareChallengeError,
|
||||
'Detected a Cloudflare version 2 challenge, This feature is not available in the opensource (free) version.'
|
||||
)
|
||||
|
||||
if self.is_New_IUAM_Challenge(resp):
|
||||
self.simpleException(
|
||||
CloudflareChallengeError,
|
||||
'Detected a Cloudflare version 2 Captcha challenge, This feature is not available in the opensource (free) version.'
|
||||
)
|
||||
|
||||
if self.is_Captcha_Challenge(resp) or self.is_IUAM_Challenge(resp):
|
||||
if self.debug:
|
||||
print('Detected a Cloudflare version 1 challenge.')
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Try to solve cloudflare javascript challenge.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def IUAM_Challenge_Response(self, body, url, interpreter):
|
||||
try:
|
||||
formPayload = re.search(
|
||||
r'<form (?P<form>.*?="challenge-form" '
|
||||
r'action="(?P<challengeUUID>.*?'
|
||||
r'__cf_chl_jschl_tk__=\S+)"(.*?)</form>)',
|
||||
body,
|
||||
re.M | re.DOTALL
|
||||
).groupdict()
|
||||
|
||||
if not all(key in formPayload for key in ['form', 'challengeUUID']):
|
||||
self.simpleException(
|
||||
CloudflareIUAMError,
|
||||
"Cloudflare IUAM detected, unfortunately we can't extract the parameters correctly."
|
||||
)
|
||||
|
||||
payload = OrderedDict()
|
||||
for challengeParam in re.findall(r'^\s*<input\s(.*?)/>', formPayload['form'], re.M | re.S):
|
||||
inputPayload = dict(re.findall(r'(\S+)="(\S+)"', challengeParam))
|
||||
if inputPayload.get('name') in ['r', 'jschl_vc', 'pass']:
|
||||
payload.update({inputPayload['name']: inputPayload['value']})
|
||||
|
||||
except AttributeError:
|
||||
self.simpleException(
|
||||
CloudflareIUAMError,
|
||||
"Cloudflare IUAM detected, unfortunately we can't extract the parameters correctly."
|
||||
)
|
||||
|
||||
hostParsed = urlparse(url)
|
||||
|
||||
try:
|
||||
payload['jschl_answer'] = JavaScriptInterpreter.dynamicImport(
|
||||
interpreter
|
||||
).solveChallenge(body, hostParsed.netloc)
|
||||
except Exception as e:
|
||||
self.simpleException(
|
||||
CloudflareIUAMError,
|
||||
'Unable to parse Cloudflare anti-bots page: {}'.format(
|
||||
getattr(e, 'message', e)
|
||||
)
|
||||
)
|
||||
|
||||
return {
|
||||
'url': '{}://{}{}'.format(
|
||||
hostParsed.scheme,
|
||||
hostParsed.netloc,
|
||||
self.unescape(formPayload['challengeUUID'])
|
||||
),
|
||||
'data': payload
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Try to solve the Captcha challenge via 3rd party.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def captcha_Challenge_Response(self, provider, provider_params, body, url):
|
||||
try:
|
||||
formPayload = re.search(
|
||||
r'<form (?P<form>.*?="challenge-form" '
|
||||
r'action="(?P<challengeUUID>.*?__cf_chl_captcha_tk__=\S+)"(.*?)</form>)',
|
||||
body,
|
||||
re.M | re.DOTALL
|
||||
).groupdict()
|
||||
|
||||
if not all(key in formPayload for key in ['form', 'challengeUUID']):
|
||||
self.simpleException(
|
||||
CloudflareCaptchaError,
|
||||
"Cloudflare Captcha detected, unfortunately we can't extract the parameters correctly."
|
||||
)
|
||||
|
||||
payload = OrderedDict(
|
||||
re.findall(
|
||||
r'(name="r"\svalue|data-ray|data-sitekey|name="cf_captcha_kind"\svalue)="(.*?)"',
|
||||
formPayload['form']
|
||||
)
|
||||
)
|
||||
|
||||
captchaType = 'reCaptcha' if payload['name="cf_captcha_kind" value'] == 're' else 'hCaptcha'
|
||||
|
||||
except (AttributeError, KeyError):
|
||||
self.simpleException(
|
||||
CloudflareCaptchaError,
|
||||
"Cloudflare Captcha detected, unfortunately we can't extract the parameters correctly."
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Pass proxy parameter to provider to solve captcha.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
if self.proxies and self.proxies != self.captcha.get('proxy'):
|
||||
self.captcha['proxy'] = self.proxies
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Pass User-Agent if provider supports it to solve captcha.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
self.captcha['User-Agent'] = self.headers['User-Agent']
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Submit job to provider to request captcha solve.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
captchaResponse = Captcha.dynamicImport(
|
||||
provider.lower()
|
||||
).solveCaptcha(
|
||||
captchaType,
|
||||
url,
|
||||
payload['data-sitekey'],
|
||||
provider_params
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Parse and handle the response of solved captcha.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
dataPayload = OrderedDict([
|
||||
('r', payload.get('name="r" value', '')),
|
||||
('cf_captcha_kind', payload['name="cf_captcha_kind" value']),
|
||||
('id', payload.get('data-ray')),
|
||||
('g-recaptcha-response', captchaResponse)
|
||||
])
|
||||
|
||||
if captchaType == 'hCaptcha':
|
||||
dataPayload.update({'h-captcha-response': captchaResponse})
|
||||
|
||||
hostParsed = urlparse(url)
|
||||
|
||||
return {
|
||||
'url': '{}://{}{}'.format(
|
||||
hostParsed.scheme,
|
||||
hostParsed.netloc,
|
||||
self.unescape(formPayload['challengeUUID'])
|
||||
),
|
||||
'data': dataPayload
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Attempt to handle and send the challenge response back to cloudflare
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def Challenge_Response(self, resp, **kwargs):
|
||||
if self.is_Captcha_Challenge(resp):
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# double down on the request as some websites are only checking
|
||||
# if cfuid is populated before issuing Captcha.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
if self.doubleDown:
|
||||
resp = self.decodeBrotli(
|
||||
self.perform_request(resp.request.method, resp.url, **kwargs)
|
||||
)
|
||||
|
||||
if not self.is_Captcha_Challenge(resp):
|
||||
return resp
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# if no captcha provider raise a runtime error.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
if not self.captcha or not isinstance(self.captcha, dict) or not self.captcha.get('provider'):
|
||||
self.simpleException(
|
||||
CloudflareCaptchaProvider,
|
||||
"Cloudflare Captcha detected, unfortunately you haven't loaded an anti Captcha provider "
|
||||
"correctly via the 'captcha' parameter."
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# if provider is return_response, return the response without doing anything.
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
if self.captcha.get('provider') == 'return_response':
|
||||
return resp
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Submit request to parser wrapper to solve captcha
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
submit_url = self.captcha_Challenge_Response(
|
||||
self.captcha.get('provider'),
|
||||
self.captcha,
|
||||
resp.text,
|
||||
resp.url
|
||||
)
|
||||
else:
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Cloudflare requires a delay before solving the challenge
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
if not self.delay:
|
||||
try:
|
||||
delay = float(
|
||||
re.search(
|
||||
r'submit\(\);\r?\n\s*},\s*([0-9]+)',
|
||||
resp.text
|
||||
).group(1)
|
||||
) / float(1000)
|
||||
if isinstance(delay, (int, float)):
|
||||
self.delay = delay
|
||||
except (AttributeError, ValueError):
|
||||
self.simpleException(
|
||||
CloudflareIUAMError,
|
||||
"Cloudflare IUAM possibility malformed, issue extracing delay value."
|
||||
)
|
||||
|
||||
sleep(self.delay)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
submit_url = self.IUAM_Challenge_Response(
|
||||
resp.text,
|
||||
resp.url,
|
||||
self.interpreter
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Send the Challenge Response back to Cloudflare
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
if submit_url:
|
||||
|
||||
def updateAttr(obj, name, newValue):
|
||||
try:
|
||||
obj[name].update(newValue)
|
||||
return obj[name]
|
||||
except (AttributeError, KeyError):
|
||||
obj[name] = {}
|
||||
obj[name].update(newValue)
|
||||
return obj[name]
|
||||
|
||||
cloudflare_kwargs = deepcopy(kwargs)
|
||||
cloudflare_kwargs['allow_redirects'] = False
|
||||
cloudflare_kwargs['data'] = updateAttr(
|
||||
cloudflare_kwargs,
|
||||
'data',
|
||||
submit_url['data']
|
||||
)
|
||||
|
||||
urlParsed = urlparse(resp.url)
|
||||
cloudflare_kwargs['headers'] = updateAttr(
|
||||
cloudflare_kwargs,
|
||||
'headers',
|
||||
{
|
||||
'Origin': '{}://{}'.format(urlParsed.scheme, urlParsed.netloc),
|
||||
'Referer': resp.url
|
||||
}
|
||||
)
|
||||
|
||||
challengeSubmitResponse = self.request(
|
||||
'POST',
|
||||
submit_url['url'],
|
||||
**cloudflare_kwargs
|
||||
)
|
||||
|
||||
if challengeSubmitResponse.status_code == 400:
|
||||
self.simpleException(
|
||||
CloudflareSolveError,
|
||||
'Invalid challenge answer detected, Cloudflare broken?'
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Return response if Cloudflare is doing content pass through instead of 3xx
|
||||
# else request with redirect URL also handle protocol scheme change http -> https
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
if not challengeSubmitResponse.is_redirect:
|
||||
return challengeSubmitResponse
|
||||
|
||||
else:
|
||||
cloudflare_kwargs = deepcopy(kwargs)
|
||||
cloudflare_kwargs['headers'] = updateAttr(
|
||||
cloudflare_kwargs,
|
||||
'headers',
|
||||
{'Referer': challengeSubmitResponse.url}
|
||||
)
|
||||
|
||||
if not urlparse(challengeSubmitResponse.headers['Location']).netloc:
|
||||
redirect_location = urljoin(
|
||||
challengeSubmitResponse.url,
|
||||
challengeSubmitResponse.headers['Location']
|
||||
)
|
||||
else:
|
||||
redirect_location = challengeSubmitResponse.headers['Location']
|
||||
|
||||
return self.request(
|
||||
resp.request.method,
|
||||
redirect_location,
|
||||
**cloudflare_kwargs
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# We shouldn't be here...
|
||||
# Re-request the original query and/or process again....
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
return self.request(resp.request.method, resp.url, **kwargs)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
@classmethod
|
||||
def create_scraper(cls, sess=None, **kwargs):
|
||||
"""
|
||||
Convenience function for creating a ready-to-go CloudScraper object.
|
||||
"""
|
||||
scraper = cls(**kwargs)
|
||||
|
||||
if sess:
|
||||
for attr in ['auth', 'cert', 'cookies', 'headers', 'hooks', 'params', 'proxies', 'data']:
|
||||
val = getattr(sess, attr, None)
|
||||
if val:
|
||||
setattr(scraper, attr, val)
|
||||
|
||||
return scraper
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
# Functions for integrating cloudscraper with other applications and scripts
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
@classmethod
|
||||
def get_tokens(cls, url, **kwargs):
|
||||
scraper = cls.create_scraper(
|
||||
**{
|
||||
field: kwargs.pop(field, None) for field in [
|
||||
'allow_brotli',
|
||||
'browser',
|
||||
'debug',
|
||||
'delay',
|
||||
'interpreter',
|
||||
'captcha',
|
||||
'requestPreHook',
|
||||
'requestPostHook',
|
||||
'source_address'
|
||||
] if field in kwargs
|
||||
}
|
||||
)
|
||||
|
||||
try:
|
||||
resp = scraper.get(url, **kwargs)
|
||||
resp.raise_for_status()
|
||||
except Exception:
|
||||
logging.error('"{}" returned an error. Could not collect tokens.'.format(url))
|
||||
raise
|
||||
|
||||
domain = urlparse(resp.url).netloc
|
||||
# noinspection PyUnusedLocal
|
||||
cookie_domain = None
|
||||
|
||||
for d in scraper.cookies.list_domains():
|
||||
if d.startswith('.') and d in ('.{}'.format(domain)):
|
||||
cookie_domain = d
|
||||
break
|
||||
else:
|
||||
cls.simpleException(
|
||||
CloudflareIUAMError,
|
||||
"Unable to find Cloudflare cookies. Does the site actually "
|
||||
"have Cloudflare IUAM (I'm Under Attack Mode) enabled?"
|
||||
)
|
||||
|
||||
return (
|
||||
{
|
||||
'__cfduid': scraper.cookies.get('__cfduid', '', domain=cookie_domain),
|
||||
'cf_clearance': scraper.cookies.get('cf_clearance', '', domain=cookie_domain)
|
||||
},
|
||||
scraper.headers['User-Agent']
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
@classmethod
|
||||
def get_cookie_string(cls, url, **kwargs):
|
||||
"""
|
||||
Convenience function for building a Cookie HTTP header value.
|
||||
"""
|
||||
tokens, user_agent = cls.get_tokens(url, **kwargs)
|
||||
return '; '.join('='.join(pair) for pair in tokens.items()), user_agent
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
# Warn (but do not fail) at import time when the linked OpenSSL is too old to
# negotiate TLS 1.3, which Cloudflare may require; old OpenSSL builds tend to
# trigger unexpected Captchas or 1020 blocks.
if ssl.OPENSSL_VERSION_INFO < (1, 1, 1):
    print(
        "DEPRECATION: The OpenSSL being used by this python install ({}) does not meet the minimum supported "
        "version (>= OpenSSL 1.1.1) in order to support TLS 1.3 required by Cloudflare, "
        "You may encounter an unexpected Captcha or cloudflare 1020 blocks.".format(
            ssl.OPENSSL_VERSION
        )
    )

# ------------------------------------------------------------------------------- #

# Module-level aliases so callers can use cloudscraper.create_scraper(...) etc.
# without referencing the CloudScraper class directly.
create_scraper = CloudScraper.create_scraper
get_tokens = CloudScraper.get_tokens
get_cookie_string = CloudScraper.get_cookie_string
|
||||
@@ -1,271 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import requests
|
||||
try:
|
||||
from urlparse import urlparse
|
||||
except ImportError:
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from ..exceptions import (
|
||||
CaptchaServiceUnavailable,
|
||||
CaptchaAPIError,
|
||||
CaptchaTimeout,
|
||||
CaptchaParameter,
|
||||
CaptchaBadJobID,
|
||||
CaptchaReportError
|
||||
)
|
||||
|
||||
try:
|
||||
import polling
|
||||
except ImportError:
|
||||
raise ImportError(
|
||||
"Please install the python module 'polling' via pip or download it from "
|
||||
"https://github.com/justiniso/polling/"
|
||||
)
|
||||
|
||||
from . import Captcha
|
||||
|
||||
|
||||
class captchaSolver(Captcha):
    """2Captcha (2captcha.com) backend for the captcha-solver registry.

    Registers itself under the provider name ``'2captcha'`` and talks to the
    2Captcha HTTP API: ``in.php`` submits a solve job, ``res.php`` polls for
    the answer or reports a bad solve.
    """

    def __init__(self):
        # Registering with the base class inserts this instance into the
        # shared provider registry keyed by name.
        super(captchaSolver, self).__init__('2captcha')
        self.host = 'https://2captcha.com'
        self.session = requests.Session()

    # ------------------------------------------------------------------------------- #

    @staticmethod
    def checkErrorStatus(response, request_type):
        """Raise a descriptive exception if *response* reports an API error.

        :param response: ``requests`` response from the 2Captcha API.
        :param request_type: endpoint that was called (``'in.php'`` or
            ``'res.php'``); selects the matching error table below.
        :raises CaptchaServiceUnavailable: on HTTP 500/502.
        :raises CaptchaAPIError: when the JSON payload carries a known code.
        """
        if response.status_code in [500, 502]:
            raise CaptchaServiceUnavailable('2Captcha: Server Side Error {}'.format(response.status_code))

        # API error code -> human-readable explanation, per endpoint.
        errors = {
            'in.php': {
                "ERROR_WRONG_USER_KEY": "You've provided api_key parameter value is in incorrect format, it should contain 32 symbols.",
                "ERROR_KEY_DOES_NOT_EXIST": "The api_key you've provided does not exists.",
                "ERROR_ZERO_BALANCE": "You don't have sufficient funds on your account.",
                "ERROR_PAGEURL": "pageurl parameter is missing in your request.",
                "ERROR_NO_SLOT_AVAILABLE":
                    "No Slots Available.\nYou can receive this error in two cases:\n"
                    "1. If you solve ReCaptcha: the queue of your captchas that are not distributed to workers is too long. "
                    "Queue limit changes dynamically and depends on total amount of captchas awaiting solution and usually it's between 50 and 100 captchas.\n"
                    "2. If you solve Normal Captcha: your maximum rate for normal captchas is lower than current rate on the server."
                    "You can change your maximum rate in your account's settings.",
                "ERROR_IP_NOT_ALLOWED": "The request is sent from the IP that is not on the list of your allowed IPs.",
                "IP_BANNED": "Your IP address is banned due to many frequent attempts to access the server using wrong authorization keys.",
                "ERROR_BAD_TOKEN_OR_PAGEURL":
                    "You can get this error code when sending ReCaptcha V2. "
                    "That happens if your request contains invalid pair of googlekey and pageurl. "
                    "The common reason for that is that ReCaptcha is loaded inside an iframe hosted on another domain/subdomain.",
                "ERROR_GOOGLEKEY":
                    "You can get this error code when sending ReCaptcha V2. "
                    "That means that sitekey value provided in your request is incorrect: it's blank or malformed.",
                "MAX_USER_TURN": "You made more than 60 requests within 3 seconds.Your account is banned for 10 seconds. Ban will be lifted automatically."
            },
            'res.php': {
                "ERROR_CAPTCHA_UNSOLVABLE":
                    "We are unable to solve your captcha - three of our workers were unable solve it "
                    "or we didn't get an answer within 90 seconds (300 seconds for ReCaptcha V2). "
                    "We will not charge you for that request.",
                "ERROR_WRONG_USER_KEY": "You've provided api_key parameter value in incorrect format, it should contain 32 symbols.",
                "ERROR_KEY_DOES_NOT_EXIST": "The api_key you've provided does not exists.",
                "ERROR_WRONG_ID_FORMAT": "You've provided captcha ID in wrong format. The ID can contain numbers only.",
                "ERROR_WRONG_CAPTCHA_ID": "You've provided incorrect captcha ID.",
                "ERROR_BAD_DUPLICATES":
                    "Error is returned when 100% accuracy feature is enabled. "
                    "The error means that max numbers of tries is reached but min number of matches not found.",
                "REPORT_NOT_RECORDED": "Error is returned to your complain request if you already complained lots of correctly solved captchas.",
                "ERROR_IP_ADDRES":
                    "You can receive this error code when registering a pingback (callback) IP or domain."
                    "That happes if your request is coming from an IP address that doesn't match the IP address of your pingback IP or domain.",
                "ERROR_TOKEN_EXPIRED": "You can receive this error code when sending GeeTest. That error means that challenge value you provided is expired.",
                "ERROR_EMPTY_ACTION": "Action parameter is missing or no value is provided for action parameter."
            }
        }

        # status == 0 means failure; translate known request codes to messages.
        if response.json().get('status') == 0 and response.json().get('request') in errors.get(request_type):
            raise CaptchaAPIError(
                '{} {}'.format(
                    response.json().get('request'),
                    errors.get(request_type).get(response.json().get('request'))
                )
            )

    # ------------------------------------------------------------------------------- #

    def reportJob(self, jobID):
        """Report a bad solve for *jobID* (``res.php`` action=reportbad).

        Polls every 5s for up to 180s. Returns True on success, raises
        CaptchaReportError on failure, CaptchaBadJobID on a falsy job id.
        """
        if not jobID:
            raise CaptchaBadJobID(
                "2Captcha: Error bad job id to request Captcha."
            )

        def _checkRequest(response):
            # Success only when HTTP is OK and the API reports status == 1;
            # otherwise surface any known API error and keep polling.
            if response.ok and response.json().get('status') == 1:
                return response

            self.checkErrorStatus(response, 'res.php')

            return None

        response = polling.poll(
            lambda: self.session.get(
                '{}/res.php'.format(self.host),
                params={
                    'key': self.api_key,
                    'action': 'reportbad',
                    'id': jobID,
                    'json': '1'
                },
                timeout=30
            ),
            check_success=_checkRequest,
            step=5,
            timeout=180
        )

        if response:
            return True
        else:
            raise CaptchaReportError(
                "2Captcha: Error - Failed to report bad Captcha solve."
            )

    # ------------------------------------------------------------------------------- #

    def requestJob(self, jobID):
        """Poll ``res.php`` (action=get) until the answer for *jobID* arrives.

        :return: the solved captcha token (the API's ``request`` field).
        :raises CaptchaTimeout: when no answer arrives within the poll window.
        :raises CaptchaBadJobID: on a falsy job id.
        """
        if not jobID:
            raise CaptchaBadJobID("2Captcha: Error bad job id to request Captcha.")

        def _checkRequest(response):
            if response.ok and response.json().get('status') == 1:
                return response

            self.checkErrorStatus(response, 'res.php')

            return None

        response = polling.poll(
            lambda: self.session.get(
                '{}/res.php'.format(self.host),
                params={
                    'key': self.api_key,
                    'action': 'get',
                    'id': jobID,
                    'json': '1'
                },
                timeout=30
            ),
            check_success=_checkRequest,
            step=5,
            timeout=180
        )

        if response:
            return response.json().get('request')
        else:
            raise CaptchaTimeout(
                "2Captcha: Error failed to solve Captcha."
            )

    # ------------------------------------------------------------------------------- #

    def requestSolve(self, captchaType, url, siteKey):
        """Submit a solve job to ``in.php`` and return the job id.

        :param captchaType: 'reCaptcha' or anything else (treated as hCaptcha).
        :param url: page URL the captcha lives on.
        :param siteKey: site key of the captcha widget.
        """
        def _checkRequest(response):
            # A successful submission returns status == 1 plus a job id in
            # the 'request' field.
            if response.ok and response.json().get("status") == 1 and response.json().get('request'):
                return response

            self.checkErrorStatus(response, 'in.php')

            return None

        data = {
            'key': self.api_key,
            'pageurl': url,
            'json': 1,
            # soft_id identifies the submitting software to 2Captcha.
            'soft_id': 5507698
        }

        # NOTE(review): 'userrcaptcha' looks misspelled ('userrecaptcha' in
        # the 2Captcha docs) — confirm against the API before relying on it.
        data.update(
            {
                'method': 'userrcaptcha',
                'googlekey': siteKey
            } if captchaType == 'reCaptcha' else {
                'method': 'hcaptcha',
                'sitekey': siteKey
            }
        )

        if self.proxy:
            data.update(
                {
                    'proxy': self.proxy,
                    'proxytype': self.proxyType
                }
            )

        response = polling.poll(
            lambda: self.session.post(
                '{}/in.php'.format(self.host),
                data=data,
                allow_redirects=False,
                timeout=30
            ),
            check_success=_checkRequest,
            step=5,
            timeout=180
        )

        if response:
            return response.json().get('request')
        else:
            raise CaptchaBadJobID(
                '2Captcha: Error no job id was returned.'
            )

    # ------------------------------------------------------------------------------- #

    def getCaptchaAnswer(self, captchaType, url, siteKey, captchaParams):
        """Entry point called by the base class: solve a captcha end to end.

        :param captchaParams: dict with 'api_key' (required) and optional
            'proxy' (dict with an 'https' URL) / 'no_proxy' flags.
        :return: the solved captcha token.
        :raises CaptchaParameter: on missing/bad parameters.
        :raises CaptchaTimeout: when solving (and reporting) time out.
        """
        jobID = None

        if not captchaParams.get('api_key'):
            raise CaptchaParameter(
                "2Captcha: Missing api_key parameter."
            )

        self.api_key = captchaParams.get('api_key')

        if captchaParams.get('proxy') and not captchaParams.get('no_proxy'):
            hostParsed = urlparse(captchaParams.get('proxy', {}).get('https'))

            if not hostParsed.scheme:
                raise CaptchaParameter('Cannot parse proxy correctly, bad scheme')

            if not hostParsed.netloc:
                raise CaptchaParameter('Cannot parse proxy correctly, bad netloc')

            self.proxyType = hostParsed.scheme
            self.proxy = hostParsed.netloc
        else:
            self.proxy = None

        try:
            jobID = self.requestSolve(captchaType, url, siteKey)
            return self.requestJob(jobID)
        except polling.TimeoutException:
            # Best-effort: report the timed-out job so the account is not
            # charged; a second timeout while reporting is surfaced distinctly.
            try:
                if jobID:
                    self.reportJob(jobID)
            except polling.TimeoutException:
                raise CaptchaTimeout(
                    "2Captcha: Captcha solve took to long and also failed reporting the job the job id {}.".format(jobID)
                )

            raise CaptchaTimeout(
                "2Captcha: Captcha solve took to long to execute job id {}, aborting.".format(jobID)
            )
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
# Instantiate at import time so the provider self-registers (see Captcha.__init__).
captchaSolver()
|
||||
@@ -1,212 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
import requests
|
||||
|
||||
try:
|
||||
import polling
|
||||
except ImportError:
|
||||
raise ImportError(
|
||||
"Please install the python module 'polling' via pip or download it from "
|
||||
"https://github.com/justiniso/polling/"
|
||||
)
|
||||
|
||||
from ..exceptions import (
|
||||
reCaptchaServiceUnavailable,
|
||||
reCaptchaAPIError,
|
||||
reCaptchaTimeout,
|
||||
reCaptchaParameter,
|
||||
reCaptchaBadJobID
|
||||
)
|
||||
|
||||
from . import reCaptcha
|
||||
|
||||
|
||||
class captchaSolver(reCaptcha):
    """9kw (9kw.eu) backend for the reCaptcha-solver registry.

    Registers itself under the provider name ``'9kw'`` and talks to the 9kw
    ``index.cgi`` API: ``usercaptchaupload`` submits a job,
    ``usercaptchacorrectdata`` polls for the answer.
    """

    def __init__(self):
        super(captchaSolver, self).__init__('9kw')
        self.host = 'https://www.9kw.eu/index.cgi'
        # Server-side solve timeout in seconds; also bounds our polling below.
        self.maxtimeout = 180
        self.session = requests.Session()

    # ------------------------------------------------------------------------------- #

    @staticmethod
    def checkErrorStatus(response):
        """Raise a descriptive exception if *response* reports an API error.

        9kw responds either with JSON (``{"error": <code>}``) or with a plain
        text body starting with '00<code>'; both forms are handled.

        :raises reCaptchaServiceUnavailable: on HTTP 500/502.
        :raises reCaptchaAPIError: on a known 9kw error code.
        """
        if response.status_code in [500, 502]:
            raise reCaptchaServiceUnavailable(
                '9kw: Server Side Error {}'.format(response.status_code)
            )

        # Numeric 9kw error code -> human-readable explanation.
        error_codes = {
            1: 'No API Key available.',
            2: 'No API key found.',
            3: 'No active API key found.',
            4: 'API Key has been disabled by the operator. ',
            5: 'No user found.',
            6: 'No data found.',
            7: 'Found No ID.',
            8: 'found No captcha.',
            9: 'No image found.',
            10: 'Image size not allowed.',
            11: 'credit is not sufficient.',
            12: 'what was done.',
            13: 'No answer contain.',
            14: 'Captcha already been answered.',
            15: 'Captcha to quickly filed.',
            16: 'JD check active.',
            17: 'Unknown problem.',
            18: 'Found No ID.',
            19: 'Incorrect answer.',
            20: 'Do not timely filed (Incorrect UserID).',
            21: 'Link not allowed.',
            22: 'Prohibited submit.',
            23: 'Entering prohibited.',
            24: 'Too little credit.',
            25: 'No entry found.',
            26: 'No Conditions accepted.',
            27: 'No coupon code found in the database.',
            28: 'Already unused voucher code.',
            29: 'maxTimeout under 60 seconds.',
            30: 'User not found.',
            31: 'An account is not yet 24 hours in system.',
            32: 'An account does not have the full rights.',
            33: 'Plugin needed a update.',
            34: 'No HTTPS allowed.',
            35: 'No HTTP allowed.',
            36: 'Source not allowed.',
            37: 'Transfer denied.',
            38: 'Incorrect answer without space',
            39: 'Incorrect answer with space',
            40: 'Incorrect answer with not only numbers',
            41: 'Incorrect answer with not only A-Z, a-z',
            42: 'Incorrect answer with not only 0-9, A-Z, a-z',
            43: 'Incorrect answer with not only [0-9,- ]',
            44: 'Incorrect answer with not only [0-9A-Za-z,- ]',
            45: 'Incorrect answer with not only coordinates',
            46: 'Incorrect answer with not only multiple coordinates',
            47: 'Incorrect answer with not only data',
            48: 'Incorrect answer with not only rotate number',
            49: 'Incorrect answer with not only text',
            50: 'Incorrect answer with not only text and too short',
            51: 'Incorrect answer with not enough chars',
            52: 'Incorrect answer with too many chars',
            53: 'Incorrect answer without no or yes',
            54: 'Assignment was not found.'
        }

        if response.text.startswith('{'):
            # JSON form: {"error": "<code>", ...}
            if response.json().get('error'):
                raise reCaptchaAPIError(error_codes.get(int(response.json().get('error'))))
        else:
            # Plain-text form: body starts with '00<code>'.
            error_code = int(re.search(r'^00(?P<error_code>\d+)', response.text).groupdict().get('error_code', 0))
            if error_code:
                raise reCaptchaAPIError(error_codes.get(error_code))

    # ------------------------------------------------------------------------------- #

    def requestJob(self, jobID):
        """Poll for the answer to *jobID* until it is available.

        Polls every 10s for up to ``maxtimeout + 10`` seconds; 'NO DATA'
        answers are treated as "not ready yet".

        :return: the solved captcha answer string.
        :raises reCaptchaTimeout: when no answer arrives in time.
        :raises reCaptchaBadJobID: on a falsy job id.
        """
        if not jobID:
            raise reCaptchaBadJobID(
                "9kw: Error bad job id to request reCaptcha against."
            )

        def _checkRequest(response):
            if response.ok and response.json().get('answer') != 'NO DATA':
                return response

            self.checkErrorStatus(response)

            return None

        response = polling.poll(
            lambda: self.session.get(
                self.host,
                params={
                    'apikey': self.api_key,
                    'action': 'usercaptchacorrectdata',
                    'id': jobID,
                    'info': 1,
                    'json': 1
                }
            ),
            check_success=_checkRequest,
            step=10,
            timeout=(self.maxtimeout + 10)
        )

        if response:
            return response.json().get('answer')
        else:
            raise reCaptchaTimeout("9kw: Error failed to solve reCaptcha.")

    # ------------------------------------------------------------------------------- #

    def requestSolve(self, captchaType, url, siteKey):
        """Submit a solve job and return the 9kw ``captchaid``.

        :param captchaType: 'reCaptcha' or 'hCaptcha' (mapped to 9kw's
            'recaptchav2' / 'hcaptcha' source names).
        :raises reCaptchaBadJobID: when no job id comes back.
        """
        def _checkRequest(response):
            # Submission succeeded only when the body is JSON and carries
            # a 'captchaid'.
            if response.ok and response.text.startswith('{') and response.json().get('captchaid'):
                return response

            self.checkErrorStatus(response)

            return None

        captchaMap = {
            'reCaptcha': 'recaptchav2',
            'hCaptcha': 'hcaptcha'
        }

        response = polling.poll(
            lambda: self.session.post(
                self.host,
                data={
                    'apikey': self.api_key,
                    'action': 'usercaptchaupload',
                    'interactive': 1,
                    'file-upload-01': siteKey,
                    'oldsource': captchaMap[captchaType],
                    'pageurl': url,
                    'maxtimeout': self.maxtimeout,
                    'json': 1
                },
                allow_redirects=False
            ),
            check_success=_checkRequest,
            step=5,
            timeout=(self.maxtimeout + 10)
        )

        if response:
            return response.json().get('captchaid')
        else:
            raise reCaptchaBadJobID('9kw: Error no valid job id was returned.')

    # ------------------------------------------------------------------------------- #

    def getCaptchaAnswer(self, captchaType, url, siteKey, reCaptchaParams):
        """Entry point called by the base class: solve a captcha end to end.

        :param reCaptchaParams: dict with 'api_key' (required) and optional
            'maxtimeout' / 'proxy' keys.
        :raises reCaptchaParameter: when api_key is missing.
        :raises reCaptchaTimeout: when the solve exceeds the poll window.
        """
        jobID = None

        if not reCaptchaParams.get('api_key'):
            raise reCaptchaParameter("9kw: Missing api_key parameter.")

        self.api_key = reCaptchaParams.get('api_key')

        if reCaptchaParams.get('maxtimeout'):
            self.maxtimeout = reCaptchaParams.get('maxtimeout')

        # NOTE(review): the flag checked is 'proxy' but the value applied is
        # 'proxies' — confirm callers pass both keys.
        if reCaptchaParams.get('proxy'):
            self.session.proxies = reCaptchaParams.get('proxies')

        try:
            jobID = self.requestSolve(captchaType, url, siteKey)
            return self.requestJob(jobID)
        except polling.TimeoutException:
            raise reCaptchaTimeout(
                "9kw: reCaptcha solve took to long to execute 'captchaid' {}, aborting.".format(jobID)
            )
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
# Instantiate at import time so the provider self-registers (see base __init__).
captchaSolver()
|
||||
@@ -1,46 +0,0 @@
|
||||
import abc
|
||||
import logging
|
||||
import sys
|
||||
|
||||
if sys.version_info >= (3, 4):
|
||||
ABC = abc.ABC # noqa
|
||||
else:
|
||||
ABC = abc.ABCMeta('ABC', (), {})
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
# Registry of provider-name -> solver instance; each provider module fills it
# in by instantiating its captchaSolver subclass at import time.
captchaSolvers = {}

# ------------------------------------------------------------------------------- #


class Captcha(ABC):
    """Abstract base class for anti-captcha provider backends.

    Subclasses register themselves by name via ``__init__`` and implement
    :meth:`getCaptchaAnswer`; consumers load a provider lazily with
    :meth:`dynamicImport` and call :meth:`solveCaptcha`.
    """

    @abc.abstractmethod
    def __init__(self, name):
        # Side effect on construction: insert this instance into the shared
        # registry under its provider name.
        captchaSolvers[name] = self

    # ------------------------------------------------------------------------------- #

    @classmethod
    def dynamicImport(cls, name):
        """Import and return the solver registered as *name*.

        Imports the sibling module ``<this package>.<name>`` on first use;
        the module is expected to register a Captcha instance as a side
        effect of being imported.

        :raises ImportError: if the module fails to import or does not
            register a valid provider.
        """
        if name not in captchaSolvers:
            try:
                __import__('{}.{}'.format(cls.__module__, name))
                if not isinstance(captchaSolvers.get(name), Captcha):
                    raise ImportError('The anti captcha provider was not initialized.')
            except ImportError:
                logging.error("Unable to load {} anti captcha provider".format(name))
                raise

        return captchaSolvers[name]

    # ------------------------------------------------------------------------------- #

    @abc.abstractmethod
    def getCaptchaAnswer(self, captchaType, url, siteKey, captchaParams):
        # Implemented by each provider: submit the captcha and return the
        # solved token.
        pass

    # ------------------------------------------------------------------------------- #

    def solveCaptcha(self, captchaType, url, siteKey, captchaParams):
        """Public entry point; delegates to the provider's getCaptchaAnswer."""
        return self.getCaptchaAnswer(captchaType, url, siteKey, captchaParams)
|
||||
@@ -1,109 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
from ..exceptions import (
|
||||
CaptchaParameter,
|
||||
CaptchaTimeout,
|
||||
CaptchaAPIError
|
||||
)
|
||||
|
||||
try:
|
||||
from urlparse import urlparse
|
||||
except ImportError:
|
||||
from urllib.parse import urlparse
|
||||
|
||||
try:
|
||||
from python_anticaptcha import (
|
||||
AnticaptchaClient,
|
||||
NoCaptchaTaskProxylessTask,
|
||||
HCaptchaTaskProxyless,
|
||||
NoCaptchaTask,
|
||||
HCaptchaTask,
|
||||
AnticaptchaException
|
||||
)
|
||||
except ImportError:
|
||||
raise ImportError(
|
||||
"Please install/upgrade the python module 'python_anticaptcha' via "
|
||||
"pip install python-anticaptcha or https://github.com/ad-m/python-anticaptcha/"
|
||||
)
|
||||
|
||||
import sys
|
||||
|
||||
from . import Captcha
|
||||
|
||||
|
||||
class captchaSolver(Captcha):
    """anti-captcha.com backend, implemented on top of ``python_anticaptcha``.

    Registers itself under the provider name ``'anticaptcha'``.
    """

    def __init__(self):
        # The proxy/hCaptcha task classes used below only exist from
        # python_anticaptcha 0.6 onwards; fail fast with a clear message.
        if sys.modules['python_anticaptcha'].__version__ < '0.6':
            raise ImportError(
                "Please upgrade the python module 'python_anticaptcha' via "
                "pip install -U python-anticaptcha or https://github.com/ad-m/python-anticaptcha/"
            )
        super(captchaSolver, self).__init__('anticaptcha')

    # ------------------------------------------------------------------------------- #

    def parseProxy(self, url, user_agent):
        """Split a proxy *url* into the kwargs python_anticaptcha tasks expect.

        :param url: proxy URL, e.g. ``http://user:pass@host:port``.
        :param user_agent: UA string to report alongside the proxy.
        :return: dict of proxy_* keyword arguments plus user_agent.
        """
        parsed = urlparse(url)

        return dict(
            proxy_type=parsed.scheme,
            proxy_address=parsed.hostname,
            proxy_port=parsed.port,
            proxy_login=parsed.username,
            proxy_password=parsed.password,
            user_agent=user_agent
        )

    # ------------------------------------------------------------------------------- #

    def getCaptchaAnswer(self, captchaType, url, siteKey, captchaParams):
        """Entry point called by the base class: solve a captcha end to end.

        :param captchaParams: dict with 'api_key' (required) and optional
            'proxy' (dict with an 'https' URL), 'no_proxy', 'User-Agent'.
        :return: the solved captcha token.
        :raises CaptchaParameter: when api_key is missing.
        :raises CaptchaTimeout: when the anticaptcha job times out.
        :raises CaptchaAPIError: when the job finishes without a solution.
        """
        if not captchaParams.get('api_key'):
            raise CaptchaParameter("anticaptcha: Missing api_key parameter.")

        client = AnticaptchaClient(captchaParams.get('api_key'))

        if captchaParams.get('proxy') and not captchaParams.get('no_proxy'):
            # Proxied task variants: the solver connects through our proxy.
            captchaMap = {
                'reCaptcha': NoCaptchaTask,
                'hCaptcha': HCaptchaTask
            }

            proxy = self.parseProxy(
                captchaParams.get('proxy', {}).get('https'),
                captchaParams.get('User-Agent', '')
            )

            task = captchaMap[captchaType](
                url,
                siteKey,
                **proxy
            )
        else:
            # Proxyless task variants.
            captchaMap = {
                'reCaptcha': NoCaptchaTaskProxylessTask,
                'hCaptcha': HCaptchaTaskProxyless
            }
            task = captchaMap[captchaType](url, siteKey)

        # createTaskSmee only exists on the hcaptcha-capable fork/branch.
        if not hasattr(client, 'createTaskSmee'):
            raise NotImplementedError(
                "Please upgrade 'python_anticaptcha' via pip or download it from "
                "https://github.com/ad-m/python-anticaptcha/tree/hcaptcha"
            )

        job = client.createTaskSmee(task, timeout=180)

        try:
            job.join(maximum_time=180)
        except (AnticaptchaException) as e:
            raise CaptchaTimeout('{}'.format(getattr(e, 'message', e)))

        if 'solution' in job._last_result:
            return job.get_solution_response()
        else:
            raise CaptchaAPIError('Job did not return `solution` key in payload.')
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
# Instantiate at import time so the provider self-registers (see Captcha.__init__).
captchaSolver()
|
||||
@@ -1,233 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import json
|
||||
import requests
|
||||
|
||||
try:
|
||||
import polling
|
||||
except ImportError:
|
||||
raise ImportError(
|
||||
"Please install the python module 'polling' via pip or download it from "
|
||||
"https://github.com/justiniso/polling/"
|
||||
)
|
||||
|
||||
from ..exceptions import (
|
||||
reCaptchaException,
|
||||
reCaptchaServiceUnavailable,
|
||||
reCaptchaAccountError,
|
||||
reCaptchaTimeout,
|
||||
reCaptchaParameter,
|
||||
reCaptchaBadJobID,
|
||||
reCaptchaReportError
|
||||
)
|
||||
|
||||
from . import reCaptcha
|
||||
|
||||
|
||||
class captchaSolver(reCaptcha):
|
||||
|
||||
def __init__(self):
|
||||
super(captchaSolver, self).__init__('deathbycaptcha')
|
||||
self.host = 'http://api.dbcapi.me/api'
|
||||
self.session = requests.Session()
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
    @staticmethod
    def checkErrorStatus(response):
        """Raise reCaptchaServiceUnavailable for known DeathByCaptcha HTTP errors.

        Only 400/403/503 are treated as fatal here; 500 is deliberately left
        out so polling retries it.
        """
        errors = dict(
            [
                (400, "DeathByCaptcha: 400 Bad Request"),
                (403, "DeathByCaptcha: 403 Forbidden - Invalid credentails or insufficient credits."),
                # (500, "DeathByCaptcha: 500 Internal Server Error."),
                (503, "DeathByCaptcha: 503 Service Temporarily Unavailable.")
            ]
        )

        if response.status_code in errors:
            raise reCaptchaServiceUnavailable(errors.get(response.status_code))
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
    def login(self, username, password):
        """Verify the DeathByCaptcha account and store the credentials.

        Posts to ``/user`` (polling every 10s, up to 120s) and validates the
        account state from the JSON response.

        :raises reCaptchaAccountError: when the account is banned or empty.
        """
        self.username = username
        self.password = password

        def _checkRequest(response):
            if response.ok:
                if response.json().get('is_banned'):
                    raise reCaptchaAccountError('DeathByCaptcha: Your account is banned.')

                # NOTE(review): key 'balanace' looks misspelled ('balance' in
                # the DBC API docs) — if so this check never fires; confirm.
                if response.json().get('balanace') == 0:
                    raise reCaptchaAccountError('DeathByCaptcha: insufficient credits.')

                return response

            self.checkErrorStatus(response)

            return None

        response = polling.poll(
            lambda: self.session.post(
                '{}/user'.format(self.host),
                headers={'Accept': 'application/json'},
                data={
                    'username': self.username,
                    'password': self.password
                }
            ),
            check_success=_checkRequest,
            step=10,
            timeout=120
        )

        self.debugRequest(response)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
    def reportJob(self, jobID):
        """Report a failed solve for *jobID* so the account is not charged.

        Polls ``/captcha/<id>/report`` every 10s for up to 180s.

        :return: True on success.
        :raises reCaptchaBadJobID: on a falsy job id.
        :raises reCaptchaReportError: when reporting fails.
        """
        if not jobID:
            raise reCaptchaBadJobID(
                "DeathByCaptcha: Error bad job id to report failed reCaptcha."
            )

        def _checkRequest(response):
            if response.status_code == 200:
                return response

            self.checkErrorStatus(response)

            return None

        response = polling.poll(
            lambda: self.session.post(
                '{}/captcha/{}/report'.format(self.host, jobID),
                headers={'Accept': 'application/json'},
                data={
                    'username': self.username,
                    'password': self.password
                }
            ),
            check_success=_checkRequest,
            step=10,
            timeout=180
        )

        if response:
            return True
        else:
            raise reCaptchaReportError(
                "DeathByCaptcha: Error report failed reCaptcha."
            )
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
    def requestJob(self, jobID):
        """Poll ``/captcha/<id>`` until the solved text for *jobID* arrives.

        Polls every 10s for up to 180s.

        :return: the solved captcha text.
        :raises reCaptchaBadJobID: on a falsy job id.
        :raises reCaptchaTimeout: when no answer arrives in time.
        """
        if not jobID:
            raise reCaptchaBadJobID(
                "DeathByCaptcha: Error bad job id to request reCaptcha."
            )

        def _checkRequest(response):
            # Ready only when the payload carries a non-empty 'text' field.
            if response.ok and response.json().get('text'):
                return response

            self.checkErrorStatus(response)

            return None

        response = polling.poll(
            lambda: self.session.get(
                '{}/captcha/{}'.format(self.host, jobID),
                headers={'Accept': 'application/json'}
            ),
            check_success=_checkRequest,
            step=10,
            timeout=180
        )

        if response:
            return response.json().get('text')
        else:
            raise reCaptchaTimeout(
                "DeathByCaptcha: Error failed to solve reCaptcha."
            )
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def requestSolve(self, url, siteKey):
|
||||
def _checkRequest(response):
|
||||
if response.ok and response.json().get("is_correct") and response.json().get('captcha'):
|
||||
return response
|
||||
|
||||
self.checkErrorStatus(response)
|
||||
|
||||
return None
|
||||
|
||||
response = polling.poll(
|
||||
lambda: self.session.post(
|
||||
'{}/captcha'.format(self.host),
|
||||
headers={'Accept': 'application/json'},
|
||||
data={
|
||||
'username': self.username,
|
||||
'password': self.password,
|
||||
'type': '4',
|
||||
'token_params': json.dumps({
|
||||
'googlekey': siteKey,
|
||||
'pageurl': url
|
||||
})
|
||||
},
|
||||
allow_redirects=False
|
||||
),
|
||||
check_success=_checkRequest,
|
||||
step=10,
|
||||
timeout=180
|
||||
)
|
||||
|
||||
if response:
|
||||
return response.json().get('captcha')
|
||||
else:
|
||||
raise reCaptchaBadJobID(
|
||||
'DeathByCaptcha: Error no job id was returned.'
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def getCaptchaAnswer(self, captchaType, url, siteKey, reCaptchaParams):
|
||||
jobID = None
|
||||
|
||||
for param in ['username', 'password']:
|
||||
if not reCaptchaParams.get(param):
|
||||
raise reCaptchaParameter(
|
||||
"DeathByCaptcha: Missing '{}' parameter.".format(param)
|
||||
)
|
||||
setattr(self, param, reCaptchaParams.get(param))
|
||||
|
||||
if captchaType == 'hCaptcha':
|
||||
raise reCaptchaException(
|
||||
'Provider does not support hCaptcha.'
|
||||
)
|
||||
|
||||
if reCaptchaParams.get('proxy'):
|
||||
self.session.proxies = reCaptchaParams.get('proxies')
|
||||
|
||||
try:
|
||||
jobID = self.requestSolve(url, siteKey)
|
||||
return self.requestJob(jobID)
|
||||
except polling.TimeoutException:
|
||||
try:
|
||||
if jobID:
|
||||
self.reportJob(jobID)
|
||||
except polling.TimeoutException:
|
||||
raise reCaptchaTimeout(
|
||||
"DeathByCaptcha: reCaptcha solve took to long and also failed reporting the job id {}.".format(jobID)
|
||||
)
|
||||
|
||||
raise reCaptchaTimeout(
|
||||
"DeathByCaptcha: reCaptcha solve took to long to execute job id {}, aborting.".format(jobID)
|
||||
)
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
captchaSolver()
|
||||
@@ -1,111 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
"""
|
||||
cloudscraper.exceptions
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
This module contains the set of cloudscraper exceptions.
|
||||
"""
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class CloudflareException(Exception):
|
||||
"""
|
||||
Base exception class for cloudscraper for Cloudflare
|
||||
"""
|
||||
|
||||
|
||||
class CloudflareLoopProtection(CloudflareException):
|
||||
"""
|
||||
Raise an exception for recursive depth protection
|
||||
"""
|
||||
|
||||
|
||||
class CloudflareCode1020(CloudflareException):
|
||||
"""
|
||||
Raise an exception for Cloudflare code 1020 block
|
||||
"""
|
||||
|
||||
|
||||
class CloudflareIUAMError(CloudflareException):
    """
    Raise an error for a problem extracting IUAM parameters
    from the Cloudflare payload
    """
|
||||
|
||||
|
||||
class CloudflareChallengeError(CloudflareException):
|
||||
"""
|
||||
Raise an error when detected new Cloudflare challenge
|
||||
"""
|
||||
|
||||
|
||||
class CloudflareSolveError(CloudflareException):
|
||||
"""
|
||||
Raise an error when issue with solving Cloudflare challenge
|
||||
"""
|
||||
|
||||
|
||||
class CloudflareCaptchaError(CloudflareException):
    """
    Raise an error for a problem extracting Captcha parameters
    from the Cloudflare payload
    """
|
||||
|
||||
|
||||
class CloudflareCaptchaProvider(CloudflareException):
|
||||
"""
|
||||
Raise an exception for no Captcha provider loaded for Cloudflare.
|
||||
"""
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class CaptchaException(Exception):
|
||||
"""
|
||||
Base exception class for cloudscraper captcha Providers
|
||||
"""
|
||||
|
||||
|
||||
class CaptchaServiceUnavailable(CaptchaException):
|
||||
"""
|
||||
Raise an exception for external services that cannot be reached
|
||||
"""
|
||||
|
||||
|
||||
class CaptchaAPIError(CaptchaException):
|
||||
"""
|
||||
Raise an error for error from API response.
|
||||
"""
|
||||
|
||||
|
||||
class CaptchaAccountError(CaptchaException):
|
||||
"""
|
||||
Raise an error for captcha provider account problem.
|
||||
"""
|
||||
|
||||
|
||||
class CaptchaTimeout(CaptchaException):
|
||||
"""
|
||||
Raise an exception for captcha provider taking too long.
|
||||
"""
|
||||
|
||||
|
||||
class CaptchaParameter(CaptchaException):
|
||||
"""
|
||||
Raise an exception for bad or missing Parameter.
|
||||
"""
|
||||
|
||||
|
||||
class CaptchaBadJobID(CaptchaException):
|
||||
"""
|
||||
Raise an exception for invalid job id.
|
||||
"""
|
||||
|
||||
|
||||
class CaptchaReportError(CaptchaException):
|
||||
"""
|
||||
Raise an error for captcha provider unable to report bad solve.
|
||||
"""
|
||||
@@ -1,79 +0,0 @@
|
||||
import json
|
||||
import platform
|
||||
import requests
|
||||
import ssl
|
||||
import sys
|
||||
import urllib3
|
||||
|
||||
from collections import OrderedDict
|
||||
from . import __version__ as cloudscraper_version
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
def getPossibleCiphers():
    """Return the sorted names of every cipher this SSL build offers.

    Falls back to a descriptive string on interpreters whose ssl module
    lacks SSLContext.get_ciphers().
    """
    try:
        ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
        ctx.set_ciphers('ALL')
        names = [entry['name'] for entry in ctx.get_ciphers()]
        return sorted(names)
    except AttributeError:
        return 'get_ciphers() is unsupported'
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
def _pythonVersion():
|
||||
interpreter = platform.python_implementation()
|
||||
interpreter_version = platform.python_version()
|
||||
|
||||
if interpreter == 'PyPy':
|
||||
interpreter_version = '{}.{}.{}'.format(
|
||||
sys.pypy_version_info.major,
|
||||
sys.pypy_version_info.minor,
|
||||
sys.pypy_version_info.micro
|
||||
)
|
||||
if sys.pypy_version_info.releaselevel != 'final':
|
||||
interpreter_version = '{}{}'.format(
|
||||
interpreter_version,
|
||||
sys.pypy_version_info.releaselevel
|
||||
)
|
||||
|
||||
return {
|
||||
'name': interpreter,
|
||||
'version': interpreter_version
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
def systemInfo():
    """Collect an ordered diagnostic snapshot of the environment.

    Includes OS platform/release, the interpreter description,
    cloudscraper / requests / urllib3 versions, and the OpenSSL
    version together with the available ciphers.
    """
    try:
        platform_info = {
            'system': platform.system(),
            'release': platform.release(),
        }
    except IOError:
        # Platform lookups can fail on exotic systems; degrade gracefully.
        platform_info = {
            'system': 'Unknown',
            'release': 'Unknown',
        }

    openssl_info = OrderedDict([
        ('version', ssl.OPENSSL_VERSION),
        ('ciphers', getPossibleCiphers())
    ])

    return OrderedDict([
        ('platform', platform_info),
        ('interpreter', _pythonVersion()),
        ('cloudscraper', cloudscraper_version),
        ('requests', requests.__version__),
        ('urllib3', urllib3.__version__),
        ('OpenSSL', openssl_info)
    ])
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
print(json.dumps(systemInfo(), indent=4))
|
||||
@@ -1,56 +0,0 @@
|
||||
import sys
|
||||
import logging
|
||||
import abc
|
||||
|
||||
from ..exceptions import CloudflareSolveError
|
||||
|
||||
if sys.version_info >= (3, 4):
|
||||
ABC = abc.ABC # noqa
|
||||
else:
|
||||
ABC = abc.ABCMeta('ABC', (), {})
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
interpreters = {}
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class JavaScriptInterpreter(ABC):
    """Abstract base for pluggable Cloudflare-challenge JS interpreters.

    Concrete subclasses register themselves in the module-level
    ``interpreters`` mapping on construction and are looked up lazily
    through :meth:`dynamicImport`.
    """

    # ------------------------------------------------------------------------------- #

    @abc.abstractmethod
    def __init__(self, name):
        # Self-registration: constructing a subclass makes it discoverable
        # under *name* in the module-level registry.
        interpreters[name] = self

    # ------------------------------------------------------------------------------- #

    @classmethod
    def dynamicImport(cls, name):
        """Return the interpreter registered as *name*, importing its
        submodule on first use.

        Raises ImportError when the module cannot be imported or did
        not register a JavaScriptInterpreter instance.
        """
        if name not in interpreters:
            try:
                # Importing the submodule triggers its self-registration.
                __import__('{}.{}'.format(cls.__module__, name))
                if not isinstance(interpreters.get(name), JavaScriptInterpreter):
                    raise ImportError('The interpreter was not initialized.')
            except ImportError:
                logging.error('Unable to load {} interpreter'.format(name))
                raise

        return interpreters[name]

    # ------------------------------------------------------------------------------- #

    @abc.abstractmethod
    def eval(self, jsEnv, js):
        """Evaluate the challenge script; implemented by each backend."""
        pass

    # ------------------------------------------------------------------------------- #

    def solveChallenge(self, body, domain):
        """Evaluate the challenge and normalize the answer to the
        10-decimal-place string form Cloudflare expects.

        Raises CloudflareSolveError on any evaluation failure.
        """
        try:
            return '{0:.10f}'.format(float(self.eval(body, domain)))
        except Exception:
            raise CloudflareSolveError(
                'Error trying to solve Cloudflare IUAM Javascript, they may have changed their technique.'
            )
|
||||
@@ -1,103 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import sys
|
||||
import ctypes.util
|
||||
|
||||
from ctypes import c_void_p, c_size_t, byref, create_string_buffer, CDLL
|
||||
|
||||
from . import JavaScriptInterpreter
|
||||
from .encapsulated import template
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class ChallengeInterpreter(JavaScriptInterpreter):
    """Cloudflare IUAM challenge interpreter backed by Microsoft ChakraCore."""

    # ------------------------------------------------------------------------------- #

    def __init__(self):
        # Register this backend under the name 'chakracore'.
        super(ChallengeInterpreter, self).__init__('chakracore')

    # ------------------------------------------------------------------------------- #

    def eval(self, body, domain):
        """Evaluate the Cloudflare challenge in *body* for *domain*.

        Locates the ChakraCore shared library (current working
        directory first, then the system library path), runs the
        encapsulated challenge script in a fresh JS runtime and returns
        the result value. Raises RuntimeError when the library cannot
        be found or loaded.
        """
        chakraCoreLibrary = None

        # check current working directory.
        for _libraryFile in ['libChakraCore.so', 'libChakraCore.dylib', 'ChakraCore.dll']:
            if os.path.isfile(os.path.join(os.getcwd(), _libraryFile)):
                chakraCoreLibrary = os.path.join(os.getcwd(), _libraryFile)
                # NOTE(review): 'continue' lets a later filename overwrite an
                # earlier match; 'break' may have been intended — confirm.
                continue

        if not chakraCoreLibrary:
            chakraCoreLibrary = ctypes.util.find_library('ChakraCore')

        if not chakraCoreLibrary:
            sys.tracebacklimit = 0
            raise RuntimeError(
                'ChakraCore library not found in current path or any of your system library paths, '
                'please download from https://www.github.com/VeNoMouS/cloudscraper/tree/ChakraCore/, '
                'or https://github.com/Microsoft/ChakraCore/'
            )

        try:
            chakraCore = CDLL(chakraCoreLibrary)
        except OSError:
            sys.tracebacklimit = 0
            raise RuntimeError('There was an error loading the ChakraCore library {}'.format(chakraCoreLibrary))

        # Non-Windows builds require explicit DllMain attach calls.
        if sys.platform != 'win32':
            chakraCore.DllMain(0, 1, 0)
            chakraCore.DllMain(0, 2, 0)

        # Script source is handed to ChakraCore as a UTF-16 buffer.
        script = create_string_buffer(template(body, domain).encode('utf-16'))

        runtime = c_void_p()
        chakraCore.JsCreateRuntime(0, 0, byref(runtime))

        context = c_void_p()
        chakraCore.JsCreateContext(runtime, byref(context))
        chakraCore.JsSetCurrentContext(context)

        fname = c_void_p()
        chakraCore.JsCreateString(
            'iuam-challenge.js',
            len('iuam-challenge.js'),
            byref(fname)
        )

        scriptSource = c_void_p()
        chakraCore.JsCreateExternalArrayBuffer(
            script,
            len(script),
            0,
            0,
            byref(scriptSource)
        )

        # 0x02 presumably marks the buffer as UTF-16-encoded script
        # (JsParseScriptAttribute flag) — TODO confirm against ChakraCore docs.
        jsResult = c_void_p()
        chakraCore.JsRun(scriptSource, 0, fname, 0x02, byref(jsResult))

        resultJSString = c_void_p()
        chakraCore.JsConvertValueToString(jsResult, byref(resultJSString))

        # First JsCopyString call with a null buffer obtains the length.
        stringLength = c_size_t()
        chakraCore.JsCopyString(resultJSString, 0, 0, byref(stringLength))

        resultSTR = create_string_buffer(stringLength.value + 1)
        chakraCore.JsCopyString(
            resultJSString,
            byref(resultSTR),
            stringLength.value + 1,
            0
        )

        chakraCore.JsDisposeRuntime(runtime)

        return resultSTR.value
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
ChallengeInterpreter()
|
||||
@@ -1,62 +0,0 @@
|
||||
import logging
|
||||
import re
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
def template(body, domain):
    """Build a standalone JS payload evaluating the Cloudflare IUAM
    challenge found in *body* for *domain*.

    Extracts the setTimeout challenge script, prepends a minimal DOM
    shim (document.createElement / getElementById backed by the
    challenge's hidden div values) and returns the combined source.
    Raises ValueError when no challenge is present and re-raises any
    extraction failure after logging.
    """
    BUG_REPORT = 'Cloudflare may have changed their technique, or there may be a bug in the script.'

    try:
        js = re.search(
            r'setTimeout\(function\(\){\s+(.*?a\.value\s*=\s*\S+toFixed\(10\);)',
            body,
            re.M | re.S
        ).group(1)
    except Exception:
        raise ValueError('Unable to identify Cloudflare IUAM Javascript on website. {}'.format(BUG_REPORT))

    jsEnv = '''String.prototype.italics=function(str) {{return "<i>" + this + "</i>";}};
        var subVars= {{{subVars}}};
        var document = {{
            createElement: function () {{
                return {{ firstChild: {{ href: "https://{domain}/" }} }}
            }},
            getElementById: function (str) {{
                return {{"innerHTML": subVars[str]}};
            }}
        }};
    '''

    try:
        # This fragment only works in a real browser; reduce it to the
        # equivalent expression before evaluation.
        js = js.replace(
            r"(setInterval(function(){}, 100),t.match(/https?:\/\//)[0]);",
            r"t.match(/https?:\/\//)[0];"
        )

        k = re.search(r" k\s*=\s*'(?P<k>\S+)';", body).group('k')
        r = re.compile(r'<div id="{}(?P<id>\d+)">\s*(?P<jsfuck>[^<>]*)</div>'.format(k))

        # Collect the hidden-div JSFuck values keyed by their numeric suffix.
        subVars = ''
        for m in r.finditer(body):
            subVars = '{}\n\t\t{}{}: {},\n'.format(subVars, k, m.group('id'), m.group('jsfuck'))
        subVars = subVars[:-2]

    except:  # noqa
        logging.error('Error extracting Cloudflare IUAM Javascript. {}'.format(BUG_REPORT))
        raise

    # Bug fix: re.sub()'s fourth positional argument is *count*, not
    # *flags*; the original passed re.MULTILINE | re.DOTALL positionally,
    # silently capping whitespace collapsing at 24 replacements. Pass it
    # as a keyword so every whitespace run is collapsed.
    return '{}{}'.format(
        re.sub(
            r'\s{2,}',
            ' ',
            jsEnv.format(
                domain=domain,
                subVars=subVars
            ),
            flags=re.MULTILINE | re.DOTALL
        ),
        js
    )
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
@@ -1,44 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import js2py
|
||||
import logging
|
||||
import base64
|
||||
|
||||
from . import JavaScriptInterpreter
|
||||
|
||||
from .encapsulated import template
|
||||
from .jsunfuck import jsunfuck
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class ChallengeInterpreter(JavaScriptInterpreter):
    """Cloudflare IUAM challenge interpreter backed by the js2py package."""

    # ------------------------------------------------------------------------------- #

    def __init__(self):
        # Register this backend under the name 'js2py'.
        super(ChallengeInterpreter, self).__init__('js2py')

    # ------------------------------------------------------------------------------- #

    def eval(self, body, domain):
        """Evaluate the challenge in *body* for *domain* with js2py."""

        jsPayload = template(body, domain)

        # Probe for a known js2py JSFuck-evaluation bug: when this probe
        # expression evaluates to '1' the engine is buggy, so the payload
        # is de-obfuscated with jsunfuck() before evaluation.
        if js2py.eval_js('(+(+!+[]+[+!+[]]+(!![]+[])[!+[]+!+[]+!+[]]+[!+[]+!+[]]+[+[]])+[])[+!+[]]') == '1':
            logging.warning('WARNING - Please upgrade your js2py https://github.com/PiotrDabkowski/Js2Py, applying work around for the meantime.')
            jsPayload = jsunfuck(jsPayload)

        def atob(s):
            # Minimal browser atob() shim for the sandboxed JS context.
            return base64.b64decode('{}'.format(s)).decode('utf-8')

        # Disable pyimport so the challenge script cannot reach into Python.
        js2py.disable_pyimport()
        context = js2py.EvalJs({'atob': atob})
        result = context.eval(jsPayload)

        return result
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
ChallengeInterpreter()
|
||||
@@ -1,97 +0,0 @@
|
||||
MAPPING = {
|
||||
'a': '(false+"")[1]',
|
||||
'b': '([]["entries"]()+"")[2]',
|
||||
'c': '([]["fill"]+"")[3]',
|
||||
'd': '(undefined+"")[2]',
|
||||
'e': '(true+"")[3]',
|
||||
'f': '(false+"")[0]',
|
||||
'g': '(false+[0]+String)[20]',
|
||||
'h': '(+(101))["to"+String["name"]](21)[1]',
|
||||
'i': '([false]+undefined)[10]',
|
||||
'j': '([]["entries"]()+"")[3]',
|
||||
'k': '(+(20))["to"+String["name"]](21)',
|
||||
'l': '(false+"")[2]',
|
||||
'm': '(Number+"")[11]',
|
||||
'n': '(undefined+"")[1]',
|
||||
'o': '(true+[]["fill"])[10]',
|
||||
'p': '(+(211))["to"+String["name"]](31)[1]',
|
||||
'q': '(+(212))["to"+String["name"]](31)[1]',
|
||||
'r': '(true+"")[1]',
|
||||
's': '(false+"")[3]',
|
||||
't': '(true+"")[0]',
|
||||
'u': '(undefined+"")[0]',
|
||||
'v': '(+(31))["to"+String["name"]](32)',
|
||||
'w': '(+(32))["to"+String["name"]](33)',
|
||||
'x': '(+(101))["to"+String["name"]](34)[1]',
|
||||
'y': '(NaN+[Infinity])[10]',
|
||||
'z': '(+(35))["to"+String["name"]](36)',
|
||||
'A': '(+[]+Array)[10]',
|
||||
'B': '(+[]+Boolean)[10]',
|
||||
'C': 'Function("return escape")()(("")["italics"]())[2]',
|
||||
'D': 'Function("return escape")()([]["fill"])["slice"]("-1")',
|
||||
'E': '(RegExp+"")[12]',
|
||||
'F': '(+[]+Function)[10]',
|
||||
'G': '(false+Function("return Date")()())[30]',
|
||||
'I': '(Infinity+"")[0]',
|
||||
'M': '(true+Function("return Date")()())[30]',
|
||||
'N': '(NaN+"")[0]',
|
||||
'O': '(NaN+Function("return{}")())[11]',
|
||||
'R': '(+[]+RegExp)[10]',
|
||||
'S': '(+[]+String)[10]',
|
||||
'T': '(NaN+Function("return Date")()())[30]',
|
||||
'U': '(NaN+Function("return{}")()["to"+String["name"]]["call"]())[11]',
|
||||
' ': '(NaN+[]["fill"])[11]',
|
||||
'"': '("")["fontcolor"]()[12]',
|
||||
'%': 'Function("return escape")()([]["fill"])[21]',
|
||||
'&': '("")["link"](0+")[10]',
|
||||
'(': '(undefined+[]["fill"])[22]',
|
||||
')': '([0]+false+[]["fill"])[20]',
|
||||
'+': '(+(+!+[]+(!+[]+[])[!+[]+!+[]+!+[]]+[+!+[]]+[+[]]+[+[]])+[])[2]',
|
||||
',': '([]["slice"]["call"](false+"")+"")[1]',
|
||||
'-': '(+(.+[0000000001])+"")[2]',
|
||||
'.': '(+(+!+[]+[+!+[]]+(!![]+[])[!+[]+!+[]+!+[]]+[!+[]+!+[]]+[+[]])+[])[+!+[]]',
|
||||
'/': '(false+[0])["italics"]()[10]',
|
||||
':': '(RegExp()+"")[3]',
|
||||
';': '("")["link"](")[14]',
|
||||
'<': '("")["italics"]()[0]',
|
||||
'=': '("")["fontcolor"]()[11]',
|
||||
'>': '("")["italics"]()[2]',
|
||||
'?': '(RegExp()+"")[2]',
|
||||
'[': '([]["entries"]()+"")[0]',
|
||||
']': '([]["entries"]()+"")[22]',
|
||||
'{': '(true+[]["fill"])[20]',
|
||||
'}': '([]["fill"]+"")["slice"]("-1")'
|
||||
}
|
||||
|
||||
SIMPLE = {
|
||||
'false': '![]',
|
||||
'true': '!![]',
|
||||
'undefined': '[][[]]',
|
||||
'NaN': '+[![]]',
|
||||
'Infinity': '+(+!+[]+(!+[]+[])[!+[]+!+[]+!+[]]+[+!+[]]+[+[]]+[+[]]+[+[]])' # +"1e1000"
|
||||
}
|
||||
|
||||
CONSTRUCTORS = {
|
||||
'Array': '[]',
|
||||
'Number': '(+[])',
|
||||
'String': '([]+[])',
|
||||
'Boolean': '(![])',
|
||||
'Function': '[]["fill"]',
|
||||
'RegExp': 'Function("return/"+false+"/")()'
|
||||
}
|
||||
|
||||
|
||||
def jsunfuck(jsfuckString):
    """Replace known JSFuck obfuscation sequences with their plain values.

    Longer mappings are substituted before shorter ones so that a short
    fragment never corrupts a longer match. Constructor substitution is
    intentionally left disabled.
    """
    def _longest_first(table):
        return sorted(table, key=lambda k: len(table[k]), reverse=True)

    for char in _longest_first(MAPPING):
        pattern = MAPPING.get(char)
        if pattern in jsfuckString:
            jsfuckString = jsfuckString.replace(pattern, '"{}"'.format(char))

    for word in _longest_first(SIMPLE):
        pattern = SIMPLE.get(word)
        if pattern in jsfuckString:
            jsfuckString = jsfuckString.replace(pattern, '{}'.format(word))

    # Constructor de-obfuscation kept disabled, as upstream left it:
    # for key in sorted(CONSTRUCTORS, key=lambda k: len(CONSTRUCTORS[k]), reverse=True):
    #     if CONSTRUCTORS.get(key) in jsfuckString:
    #         jsfuckString = jsfuckString.replace(CONSTRUCTORS.get(key), '{}'.format(key))

    return jsfuckString
|
||||
@@ -1,233 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import ast
|
||||
import re
|
||||
import operator as op
|
||||
import pyparsing
|
||||
|
||||
from ..exceptions import CloudflareSolveError
|
||||
from . import JavaScriptInterpreter
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
# AST operator node -> arithmetic callable used by Calc below.
_OP_MAP = {
    ast.Add: op.add,
    ast.Sub: op.sub,
    ast.Mult: op.mul,
    ast.Div: op.truediv,
    ast.Invert: op.neg,
}

# ------------------------------------------------------------------------------- #


class Calc(ast.NodeVisitor):
    """Tiny AST walker that evaluates simple arithmetic expressions.

    Only binary +, -, *, / over numeric literals is supported; other
    node types fall through to NodeVisitor's generic handling.
    """

    def visit_BinOp(self, node):
        # Resolve both operands recursively, then apply the operator.
        left = self.visit(node.left)
        right = self.visit(node.right)
        return _OP_MAP[type(node.op)](left, right)

    # ------------------------------------------------------------------------------- #

    def visit_Num(self, node):
        # Numeric literal: its value.
        return node.n

    # ------------------------------------------------------------------------------- #

    def visit_Expr(self, node):
        # Expression statement: unwrap to the inner expression.
        return self.visit(node.value)

    # ------------------------------------------------------------------------------- #

    @classmethod
    def doMath(cls, expression):
        """Parse *expression* and evaluate its first statement."""
        return cls().visit(ast.parse(expression).body[0])
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class Parentheses(object):
    """Produce balanced variants of a string by removing the minimum
    number of parentheses."""

    def fix(self, s):
        """Return every balanced string reachable from *s* by deleting
        the fewest possible '(' / ')' characters."""
        results = []
        self.visited = {s}
        self.dfs(s, self.invalid(s), results)
        return results

    # ------------------------------------------------------------------------------- #

    def dfs(self, s, n, res):
        # n == 0 means *s* is already balanced: record it and stop.
        if n == 0:
            res.append(s)
            return
        for idx in range(len(s)):
            if s[idx] not in ['(', ')']:
                continue
            candidate = s[:idx] + s[idx + 1:]
            # Only descend when removing this char strictly helps.
            if candidate not in self.visited and self.invalid(candidate) < n:
                self.visited.add(candidate)
                self.dfs(candidate, self.invalid(candidate), res)

    # ------------------------------------------------------------------------------- #

    def invalid(self, s):
        """Count how many parentheses must be removed to balance *s*."""
        plus = minus = 0
        memo = {"(": 1, ")": -1}
        for c in s:
            plus += memo.get(c, 0)
            # An unmatched ')' is counted immediately...
            minus += 1 if plus < 0 else 0
            # ...and the running '(' surplus never goes negative.
            plus = max(0, plus)
        return plus + minus
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class ChallengeInterpreter(JavaScriptInterpreter):
    """Pure-Python Cloudflare IUAM challenge solver ('native').

    Instead of executing JavaScript, it parses the JSFuck-obfuscated
    arithmetic out of the challenge markup and evaluates it directly
    with Calc / Parentheses defined above.
    """

    def __init__(self):
        # Register this backend under the name 'native'.
        super(ChallengeInterpreter, self).__init__('native')

    # ------------------------------------------------------------------------------- #

    def eval(self, body, domain):
        """Solve the challenge embedded in *body* for *domain*.

        Returns the answer formatted to 10 decimal places as a string.
        Raises CloudflareSolveError when any extraction step fails.
        """

        # Operators appearing in the challenge's compound assignments.
        operators = {
            '+': op.add,
            '-': op.sub,
            '*': op.mul,
            '/': op.truediv
        }

        # ------------------------------------------------------------------------------- #

        def flatten(lists):
            # Recursively flatten the nested lists pyparsing produces.
            return sum(map(flatten, lists), []) if isinstance(lists, list) else [lists]

        # ------------------------------------------------------------------------------- #

        def jsfuckToNumber(jsFuck):
            # Convert a JSFuck arithmetic expression into an int.
            # "Clean Up" JSFuck
            jsFuck = jsFuck.replace('!+[]', '1').replace('!![]', '1').replace('[]', '0')
            jsFuck = jsFuck.lstrip('+').replace('(+', '(').replace(' ', '')
            jsFuck = Parentheses().fix(jsFuck)[0]

            # Hackery Parser for Math
            stack = []
            bstack = []

            # Tokens between top-level '+' separators form digit groups;
            # each group is evaluated and the results concatenated as digits.
            for i in flatten(pyparsing.nestedExpr().parseString(jsFuck).asList()):
                if i == '+':
                    stack.append(bstack)
                    bstack = []
                    continue
                bstack.append(i)
            stack.append(bstack)

            return int(''.join([str(Calc.doMath(''.join(i))) for i in stack]))

        # ------------------------------------------------------------------------------- #

        def divisorMath(payload, needle, domain):
            # Evaluate a division expression; when *needle* appears in the
            # divisor, the divisor additionally depends on a character of
            # *domain* selected by an embedded JSFuck index.
            jsfuckMath = payload.split('/')
            if needle in jsfuckMath[1]:
                expression = re.findall(r"^(.*?)(.)\(function", jsfuckMath[1])[0]

                expression_value = operators[expression[1]](
                    float(jsfuckToNumber(expression[0])),
                    float(ord(domain[jsfuckToNumber(jsfuckMath[1][
                        jsfuckMath[1].find('"("+p+")")}') + len('"("+p+")")}'):-2
                    ])]))
                )
            else:
                expression_value = jsfuckToNumber(jsfuckMath[1])

            expression_value = jsfuckToNumber(jsfuckMath[0]) / float(expression_value)

            return expression_value

        # ------------------------------------------------------------------------------- #

        def challengeSolve(body, domain):
            # Extract the challenge pieces, seed the answer from the init
            # expression, then fold each compound assignment into it.
            jschl_answer = 0

            try:
                jsfuckChallenge = re.search(
                    r"setTimeout\(function\(\){\s+var.*?f,\s*(?P<variable>\w+).*?:(?P<init>\S+)};"
                    r".*?\('challenge-form'\);.*?;(?P<challenge>.*?a\.value)\s*=\s*\S+\.toFixed\(10\);",
                    body,
                    re.DOTALL | re.MULTILINE
                ).groupdict()
            except AttributeError:
                raise CloudflareSolveError('There was an issue extracting "jsfuckChallenge" from the Cloudflare challenge.')

            # Optional k-variable: an index selecting one of the hidden
            # <div id="k<N>"> JSFuck payloads embedded in the page.
            kJSFUCK = re.search(r'(;|)\s*k.=(?P<kJSFUCK>\S+);', jsfuckChallenge['challenge'], re.S | re.M)
            if kJSFUCK:
                try:
                    kJSFUCK = jsfuckToNumber(kJSFUCK.group('kJSFUCK'))
                except IndexError:
                    raise CloudflareSolveError('There was an issue extracting "kJSFUCK" from the Cloudflare challenge.')

                try:
                    kID = re.search(r"\s*k\s*=\s*'(?P<kID>\S+)';", body).group('kID')
                except IndexError:
                    raise CloudflareSolveError('There was an issue extracting "kID" from the Cloudflare challenge.')

                try:
                    r = re.compile(r'<div id="{}(?P<id>\d+)">\s*(?P<jsfuck>[^<>]*)</div>'.format(kID))

                    kValues = {}
                    for m in r.finditer(body):
                        kValues[int(m.group('id'))] = m.group('jsfuck')

                    jsfuckChallenge['k'] = kValues[kJSFUCK]
                except (AttributeError, IndexError):
                    raise CloudflareSolveError('There was an issue extracting "kValues" from the Cloudflare challenge.')

            # Each match is one compound assignment (op, expression) on
            # the challenge variable.
            jsfuckChallenge['challenge'] = re.finditer(
                r'{}.*?([+\-*/])=(.*?);(?=a\.value|{})'.format(
                    jsfuckChallenge['variable'],
                    jsfuckChallenge['variable']
                ),
                jsfuckChallenge['challenge']
            )

            # ------------------------------------------------------------------------------- #

            if '/' in jsfuckChallenge['init']:
                val = jsfuckChallenge['init'].split('/')
                jschl_answer = jsfuckToNumber(val[0]) / float(jsfuckToNumber(val[1]))
            else:
                jschl_answer = jsfuckToNumber(jsfuckChallenge['init'])

            # ------------------------------------------------------------------------------- #

            for expressionMatch in jsfuckChallenge['challenge']:
                oper, expression = expressionMatch.groups()

                if '/' in expression:
                    expression_value = divisorMath(expression, 'function(p)', domain)
                else:
                    if 'Element' in expression:
                        # Expression references the hidden div payload.
                        expression_value = divisorMath(jsfuckChallenge['k'], '"("+p+")")}', domain)
                    else:
                        expression_value = jsfuckToNumber(expression)

                jschl_answer = operators[oper](jschl_answer, expression_value)

            # ------------------------------------------------------------------------------- #

            # Upstream left this adjustment disabled:
            # if not jsfuckChallenge['k'] and '+ t.length' in body:
            #     jschl_answer += len(domain)

            # ------------------------------------------------------------------------------- #

            return '{0:.10f}'.format(jschl_answer)

        # ------------------------------------------------------------------------------- #

        return challengeSolve(body, domain)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
ChallengeInterpreter()
|
||||
@@ -1,49 +0,0 @@
|
||||
import base64
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from . import JavaScriptInterpreter
|
||||
from .encapsulated import template
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class ChallengeInterpreter(JavaScriptInterpreter):
    """Cloudflare IUAM challenge interpreter that shells out to Node.js."""

    # ------------------------------------------------------------------------------- #

    def __init__(self):
        # Register this backend under the name 'nodejs'.
        super(ChallengeInterpreter, self).__init__('nodejs')

    # ------------------------------------------------------------------------------- #

    def eval(self, body, domain):
        """Run the encapsulated challenge inside ``node -e`` and return
        its stdout (bytes).

        The challenge source is base64-encoded to survive quoting, then
        executed in an isolated `vm` context with an atob() shim and a
        4-second timeout. Raises EnvironmentError when the node binary
        is missing and RuntimeError on any other execution failure.
        """
        try:
            js = 'var atob = function(str) {return Buffer.from(str, "base64").toString("binary");};' \
                 'var challenge = atob("%s");' \
                 'var context = {atob: atob};' \
                 'var options = {filename: "iuam-challenge.js", timeout: 4000};' \
                 'var answer = require("vm").runInNewContext(challenge, context, options);' \
                 'process.stdout.write(String(answer));' \
                 % base64.b64encode(template(body, domain).encode('UTF-8')).decode('ascii')

            return subprocess.check_output(['node', '-e', js])

        except OSError as e:
            # errno 2 == ENOENT: the node executable was not found.
            if e.errno == 2:
                raise EnvironmentError(
                    'Missing Node.js runtime. Node is required and must be in the PATH (check with `node -v`).\n\n'
                    'Your Node binary may be called `nodejs` rather than `node`, '
                    'in which case you may need to run `apt-get install nodejs-legacy` on some Debian-based systems.\n\n'
                    '(Please read the cloudscraper README\'s Dependencies section: '
                    'https://github.com/VeNoMouS/cloudscraper#dependencies.)'
                )
            raise
        except Exception:
            sys.tracebacklimit = 0
            raise RuntimeError('Error executing Cloudflare IUAM Javascript in nodejs')
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
ChallengeInterpreter()
|
||||
@@ -1,33 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
|
||||
try:
|
||||
import v8eval
|
||||
except ImportError:
|
||||
sys.tracebacklimit = 0
|
||||
raise RuntimeError('Please install the python module v8eval either via pip or download it from https://github.com/sony/v8eval')
|
||||
|
||||
from . import JavaScriptInterpreter
|
||||
from .encapsulated import template
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class ChallengeInterpreter(JavaScriptInterpreter):
    """Cloudflare IUAM challenge interpreter backed by the v8eval engine."""

    def __init__(self):
        # Register this backend under the name 'v8'.
        super(ChallengeInterpreter, self).__init__('v8')

    # ------------------------------------------------------------------------------- #

    def eval(self, body, domain):
        """Run the encapsulated challenge script in V8 and return its
        result.

        Raises RuntimeError when the engine rejects or fails to run the
        script.
        """
        try:
            return v8eval.V8().eval(template(body, domain))
        except (TypeError, v8eval.V8Error):
            # Bug fix: the original constructed this RuntimeError without
            # raising it, so engine failures silently returned None.
            raise RuntimeError('We encountered an error running the V8 Engine.')
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
ChallengeInterpreter()
|
||||
@@ -1,124 +0,0 @@
|
||||
import json
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
import sys
|
||||
import ssl
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
|
||||
class User_Agent():
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
    def __init__(self, *args, **kwargs):
        """Initialize and immediately resolve a user-agent profile.

        ``headers`` and ``cipherSuite`` are populated by loadUserAgent().
        """
        self.headers = None
        self.cipherSuite = []
        self.loadUserAgent(*args, **kwargs)
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def filterAgents(self, user_agents):
|
||||
filtered = {}
|
||||
|
||||
if self.mobile:
|
||||
if self.platform in user_agents['mobile'] and user_agents['mobile'][self.platform]:
|
||||
filtered.update(user_agents['mobile'][self.platform])
|
||||
|
||||
if self.desktop:
|
||||
if self.platform in user_agents['desktop'] and user_agents['desktop'][self.platform]:
|
||||
filtered.update(user_agents['desktop'][self.platform])
|
||||
|
||||
return filtered
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def tryMatchCustom(self, user_agents):
|
||||
for device_type in user_agents['user_agents']:
|
||||
for platform in user_agents['user_agents'][device_type]:
|
||||
for browser in user_agents['user_agents'][device_type][platform]:
|
||||
if re.search(re.escape(self.custom), ' '.join(user_agents['user_agents'][device_type][platform][browser])):
|
||||
self.headers = user_agents['headers'][browser]
|
||||
self.headers['User-Agent'] = self.custom
|
||||
self.cipherSuite = user_agents['cipherSuite'][browser]
|
||||
return True
|
||||
return False
|
||||
|
||||
# ------------------------------------------------------------------------------- #
|
||||
|
||||
def loadUserAgent(self, *args, **kwargs):
|
||||
self.browser = kwargs.pop('browser', None)
|
||||
|
||||
self.platforms = ['linux', 'windows', 'darwin', 'android', 'ios']
|
||||
self.browsers = ['chrome', 'firefox']
|
||||
|
||||
if isinstance(self.browser, dict):
|
||||
self.custom = self.browser.get('custom', None)
|
||||
self.platform = self.browser.get('platform', None)
|
||||
self.desktop = self.browser.get('desktop', True)
|
||||
self.mobile = self.browser.get('mobile', True)
|
||||
self.browser = self.browser.get('browser', None)
|
||||
else:
|
||||
self.custom = kwargs.pop('custom', None)
|
||||
self.platform = kwargs.pop('platform', None)
|
||||
self.desktop = kwargs.pop('desktop', True)
|
||||
self.mobile = kwargs.pop('mobile', True)
|
||||
|
||||
if not self.desktop and not self.mobile:
|
||||
sys.tracebacklimit = 0
|
||||
raise RuntimeError("Sorry you can't have mobile and desktop disabled at the same time.")
|
||||
|
||||
with open(os.path.join(os.path.dirname(__file__), 'browsers.json'), 'r') as fp:
|
||||
user_agents = json.load(
|
||||
fp,
|
||||
object_pairs_hook=OrderedDict
|
||||
)
|
||||
|
||||
if self.custom:
|
||||
if not self.tryMatchCustom(user_agents):
|
||||
self.cipherSuite = [
|
||||
ssl._DEFAULT_CIPHERS,
|
||||
'!AES128-SHA',
|
||||
'!ECDHE-RSA-AES256-SHA',
|
||||
]
|
||||
self.headers = OrderedDict([
|
||||
('User-Agent', self.custom),
|
||||
('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'),
|
||||
('Accept-Language', 'en-US,en;q=0.9'),
|
||||
('Accept-Encoding', 'gzip, deflate, br')
|
||||
])
|
||||
else:
|
||||
if self.browser and self.browser not in self.browsers:
|
||||
sys.tracebacklimit = 0
|
||||
raise RuntimeError('Sorry "{}" browser is not valid, valid browsers are [{}].'.format(self.browser, ', '.join(self.browsers)))
|
||||
|
||||
if not self.platform:
|
||||
self.platform = random.SystemRandom().choice(self.platforms)
|
||||
|
||||
if self.platform not in self.platforms:
|
||||
sys.tracebacklimit = 0
|
||||
raise RuntimeError('Sorry the platform "{}" is not valid, valid platforms are [{}]'.format(self.platform, ', '.join(self.platforms)))
|
||||
|
||||
filteredAgents = self.filterAgents(user_agents['user_agents'])
|
||||
|
||||
if not self.browser:
|
||||
# has to be at least one in there...
|
||||
while not filteredAgents.get(self.browser):
|
||||
self.browser = random.SystemRandom().choice(list(filteredAgents.keys()))
|
||||
|
||||
if not filteredAgents[self.browser]:
|
||||
sys.tracebacklimit = 0
|
||||
raise RuntimeError('Sorry "{}" browser was not found with a platform of "{}".'.format(self.browser, self.platform))
|
||||
|
||||
self.cipherSuite = user_agents['cipherSuite'][self.browser]
|
||||
self.headers = user_agents['headers'][self.browser]
|
||||
|
||||
self.headers['User-Agent'] = random.SystemRandom().choice(filteredAgents[self.browser])
|
||||
|
||||
if not kwargs.get('allow_brotli', False) and 'br' in self.headers['Accept-Encoding']:
|
||||
self.headers['Accept-Encoding'] = ','.join([
|
||||
encoding for encoding in self.headers['Accept-Encoding'].split(',') if encoding.strip() != 'br'
|
||||
]).strip()
|
||||
File diff suppressed because it is too large
Load Diff
7
lib/pyasn1/__init__.py
Normal file
7
lib/pyasn1/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
import sys
|
||||
|
||||
# https://www.python.org/dev/peps/pep-0396/
|
||||
__version__ = '0.4.9'
|
||||
|
||||
if sys.version_info[:2] < (2, 7):
|
||||
raise RuntimeError('PyASN1 requires Python 2.7 or later')
|
||||
2066
lib/pyasn1/codec/ber/decoder.py
Normal file
2066
lib/pyasn1/codec/ber/decoder.py
Normal file
File diff suppressed because it is too large
Load Diff
910
lib/pyasn1/codec/ber/encoder.py
Normal file
910
lib/pyasn1/codec/ber/encoder.py
Normal file
@@ -0,0 +1,910 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import sys
|
||||
|
||||
from pyasn1 import debug
|
||||
from pyasn1 import error
|
||||
from pyasn1.codec.ber import eoo
|
||||
from pyasn1.compat.integer import to_bytes
|
||||
from pyasn1.compat.octets import (int2oct, oct2int, ints2octs, null,
|
||||
str2octs, isOctetsType)
|
||||
from pyasn1.type import char
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import univ
|
||||
from pyasn1.type import useful
|
||||
|
||||
__all__ = ['Encoder', 'encode']
|
||||
|
||||
LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_ENCODER)
|
||||
|
||||
|
||||
class AbstractItemEncoder(object):
|
||||
supportIndefLenMode = True
|
||||
|
||||
# An outcome of otherwise legit call `encodeFun(eoo.endOfOctets)`
|
||||
eooIntegerSubstrate = (0, 0)
|
||||
eooOctetsSubstrate = ints2octs(eooIntegerSubstrate)
|
||||
|
||||
# noinspection PyMethodMayBeStatic
|
||||
def encodeTag(self, singleTag, isConstructed):
|
||||
tagClass, tagFormat, tagId = singleTag
|
||||
encodedTag = tagClass | tagFormat
|
||||
if isConstructed:
|
||||
encodedTag |= tag.tagFormatConstructed
|
||||
|
||||
if tagId < 31:
|
||||
return encodedTag | tagId,
|
||||
|
||||
else:
|
||||
substrate = tagId & 0x7f,
|
||||
|
||||
tagId >>= 7
|
||||
|
||||
while tagId:
|
||||
substrate = (0x80 | (tagId & 0x7f),) + substrate
|
||||
tagId >>= 7
|
||||
|
||||
return (encodedTag | 0x1F,) + substrate
|
||||
|
||||
def encodeLength(self, length, defMode):
|
||||
if not defMode and self.supportIndefLenMode:
|
||||
return (0x80,)
|
||||
|
||||
if length < 0x80:
|
||||
return length,
|
||||
|
||||
else:
|
||||
substrate = ()
|
||||
while length:
|
||||
substrate = (length & 0xff,) + substrate
|
||||
length >>= 8
|
||||
|
||||
substrateLen = len(substrate)
|
||||
|
||||
if substrateLen > 126:
|
||||
raise error.PyAsn1Error('Length octets overflow (%d)' % substrateLen)
|
||||
|
||||
return (0x80 | substrateLen,) + substrate
|
||||
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
raise error.PyAsn1Error('Not implemented')
|
||||
|
||||
def encode(self, value, asn1Spec=None, encodeFun=None, **options):
|
||||
|
||||
if asn1Spec is None:
|
||||
tagSet = value.tagSet
|
||||
else:
|
||||
tagSet = asn1Spec.tagSet
|
||||
|
||||
# untagged item?
|
||||
if not tagSet:
|
||||
substrate, isConstructed, isOctets = self.encodeValue(
|
||||
value, asn1Spec, encodeFun, **options
|
||||
)
|
||||
return substrate
|
||||
|
||||
defMode = options.get('defMode', True)
|
||||
|
||||
substrate = null
|
||||
|
||||
for idx, singleTag in enumerate(tagSet.superTags):
|
||||
|
||||
defModeOverride = defMode
|
||||
|
||||
# base tag?
|
||||
if not idx:
|
||||
try:
|
||||
substrate, isConstructed, isOctets = self.encodeValue(
|
||||
value, asn1Spec, encodeFun, **options
|
||||
)
|
||||
|
||||
except error.PyAsn1Error:
|
||||
exc = sys.exc_info()
|
||||
raise error.PyAsn1Error(
|
||||
'Error encoding %r: %s' % (value, exc[1]))
|
||||
|
||||
if LOG:
|
||||
LOG('encoded %svalue %s into %s' % (
|
||||
isConstructed and 'constructed ' or '', value, substrate
|
||||
))
|
||||
|
||||
if not substrate and isConstructed and options.get('ifNotEmpty', False):
|
||||
return substrate
|
||||
|
||||
if not isConstructed:
|
||||
defModeOverride = True
|
||||
|
||||
if LOG:
|
||||
LOG('overridden encoding mode into definitive for primitive type')
|
||||
|
||||
header = self.encodeTag(singleTag, isConstructed)
|
||||
|
||||
if LOG:
|
||||
LOG('encoded %stag %s into %s' % (
|
||||
isConstructed and 'constructed ' or '',
|
||||
singleTag, debug.hexdump(ints2octs(header))))
|
||||
|
||||
header += self.encodeLength(len(substrate), defModeOverride)
|
||||
|
||||
if LOG:
|
||||
LOG('encoded %s octets (tag + payload) into %s' % (
|
||||
len(substrate), debug.hexdump(ints2octs(header))))
|
||||
|
||||
if isOctets:
|
||||
substrate = ints2octs(header) + substrate
|
||||
|
||||
if not defModeOverride:
|
||||
substrate += self.eooOctetsSubstrate
|
||||
|
||||
else:
|
||||
substrate = header + substrate
|
||||
|
||||
if not defModeOverride:
|
||||
substrate += self.eooIntegerSubstrate
|
||||
|
||||
if not isOctets:
|
||||
substrate = ints2octs(substrate)
|
||||
|
||||
return substrate
|
||||
|
||||
|
||||
class EndOfOctetsEncoder(AbstractItemEncoder):
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
return null, False, True
|
||||
|
||||
|
||||
class BooleanEncoder(AbstractItemEncoder):
|
||||
supportIndefLenMode = False
|
||||
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
return value and (1,) or (0,), False, False
|
||||
|
||||
|
||||
class IntegerEncoder(AbstractItemEncoder):
|
||||
supportIndefLenMode = False
|
||||
supportCompactZero = False
|
||||
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
if value == 0:
|
||||
if LOG:
|
||||
LOG('encoding %spayload for zero INTEGER' % (
|
||||
self.supportCompactZero and 'no ' or ''
|
||||
))
|
||||
|
||||
# de-facto way to encode zero
|
||||
if self.supportCompactZero:
|
||||
return (), False, False
|
||||
else:
|
||||
return (0,), False, False
|
||||
|
||||
return to_bytes(int(value), signed=True), False, True
|
||||
|
||||
|
||||
class BitStringEncoder(AbstractItemEncoder):
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
if asn1Spec is not None:
|
||||
# TODO: try to avoid ASN.1 schema instantiation
|
||||
value = asn1Spec.clone(value)
|
||||
|
||||
valueLength = len(value)
|
||||
if valueLength % 8:
|
||||
alignedValue = value << (8 - valueLength % 8)
|
||||
else:
|
||||
alignedValue = value
|
||||
|
||||
maxChunkSize = options.get('maxChunkSize', 0)
|
||||
if not maxChunkSize or len(alignedValue) <= maxChunkSize * 8:
|
||||
substrate = alignedValue.asOctets()
|
||||
return int2oct(len(substrate) * 8 - valueLength) + substrate, False, True
|
||||
|
||||
if LOG:
|
||||
LOG('encoding into up to %s-octet chunks' % maxChunkSize)
|
||||
|
||||
baseTag = value.tagSet.baseTag
|
||||
|
||||
# strip off explicit tags
|
||||
if baseTag:
|
||||
tagSet = tag.TagSet(baseTag, baseTag)
|
||||
|
||||
else:
|
||||
tagSet = tag.TagSet()
|
||||
|
||||
alignedValue = alignedValue.clone(tagSet=tagSet)
|
||||
|
||||
stop = 0
|
||||
substrate = null
|
||||
while stop < valueLength:
|
||||
start = stop
|
||||
stop = min(start + maxChunkSize * 8, valueLength)
|
||||
substrate += encodeFun(alignedValue[start:stop], asn1Spec, **options)
|
||||
|
||||
return substrate, True, True
|
||||
|
||||
|
||||
class OctetStringEncoder(AbstractItemEncoder):
|
||||
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
|
||||
if asn1Spec is None:
|
||||
substrate = value.asOctets()
|
||||
|
||||
elif not isOctetsType(value):
|
||||
substrate = asn1Spec.clone(value).asOctets()
|
||||
|
||||
else:
|
||||
substrate = value
|
||||
|
||||
maxChunkSize = options.get('maxChunkSize', 0)
|
||||
|
||||
if not maxChunkSize or len(substrate) <= maxChunkSize:
|
||||
return substrate, False, True
|
||||
|
||||
if LOG:
|
||||
LOG('encoding into up to %s-octet chunks' % maxChunkSize)
|
||||
|
||||
# strip off explicit tags for inner chunks
|
||||
|
||||
if asn1Spec is None:
|
||||
baseTag = value.tagSet.baseTag
|
||||
|
||||
# strip off explicit tags
|
||||
if baseTag:
|
||||
tagSet = tag.TagSet(baseTag, baseTag)
|
||||
|
||||
else:
|
||||
tagSet = tag.TagSet()
|
||||
|
||||
asn1Spec = value.clone(tagSet=tagSet)
|
||||
|
||||
elif not isOctetsType(value):
|
||||
baseTag = asn1Spec.tagSet.baseTag
|
||||
|
||||
# strip off explicit tags
|
||||
if baseTag:
|
||||
tagSet = tag.TagSet(baseTag, baseTag)
|
||||
|
||||
else:
|
||||
tagSet = tag.TagSet()
|
||||
|
||||
asn1Spec = asn1Spec.clone(tagSet=tagSet)
|
||||
|
||||
pos = 0
|
||||
substrate = null
|
||||
|
||||
while True:
|
||||
chunk = value[pos:pos + maxChunkSize]
|
||||
if not chunk:
|
||||
break
|
||||
|
||||
substrate += encodeFun(chunk, asn1Spec, **options)
|
||||
pos += maxChunkSize
|
||||
|
||||
return substrate, True, True
|
||||
|
||||
|
||||
class NullEncoder(AbstractItemEncoder):
|
||||
supportIndefLenMode = False
|
||||
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
return null, False, True
|
||||
|
||||
|
||||
class ObjectIdentifierEncoder(AbstractItemEncoder):
|
||||
supportIndefLenMode = False
|
||||
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
if asn1Spec is not None:
|
||||
value = asn1Spec.clone(value)
|
||||
|
||||
oid = value.asTuple()
|
||||
|
||||
# Build the first pair
|
||||
try:
|
||||
first = oid[0]
|
||||
second = oid[1]
|
||||
|
||||
except IndexError:
|
||||
raise error.PyAsn1Error('Short OID %s' % (value,))
|
||||
|
||||
if 0 <= second <= 39:
|
||||
if first == 1:
|
||||
oid = (second + 40,) + oid[2:]
|
||||
elif first == 0:
|
||||
oid = (second,) + oid[2:]
|
||||
elif first == 2:
|
||||
oid = (second + 80,) + oid[2:]
|
||||
else:
|
||||
raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,))
|
||||
|
||||
elif first == 2:
|
||||
oid = (second + 80,) + oid[2:]
|
||||
|
||||
else:
|
||||
raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,))
|
||||
|
||||
octets = ()
|
||||
|
||||
# Cycle through subIds
|
||||
for subOid in oid:
|
||||
if 0 <= subOid <= 127:
|
||||
# Optimize for the common case
|
||||
octets += (subOid,)
|
||||
|
||||
elif subOid > 127:
|
||||
# Pack large Sub-Object IDs
|
||||
res = (subOid & 0x7f,)
|
||||
subOid >>= 7
|
||||
|
||||
while subOid:
|
||||
res = (0x80 | (subOid & 0x7f),) + res
|
||||
subOid >>= 7
|
||||
|
||||
# Add packed Sub-Object ID to resulted Object ID
|
||||
octets += res
|
||||
|
||||
else:
|
||||
raise error.PyAsn1Error('Negative OID arc %s at %s' % (subOid, value))
|
||||
|
||||
return octets, False, False
|
||||
|
||||
|
||||
class RealEncoder(AbstractItemEncoder):
|
||||
supportIndefLenMode = 0
|
||||
binEncBase = 2 # set to None to choose encoding base automatically
|
||||
|
||||
@staticmethod
|
||||
def _dropFloatingPoint(m, encbase, e):
|
||||
ms, es = 1, 1
|
||||
if m < 0:
|
||||
ms = -1 # mantissa sign
|
||||
|
||||
if e < 0:
|
||||
es = -1 # exponent sign
|
||||
|
||||
m *= ms
|
||||
|
||||
if encbase == 8:
|
||||
m *= 2 ** (abs(e) % 3 * es)
|
||||
e = abs(e) // 3 * es
|
||||
|
||||
elif encbase == 16:
|
||||
m *= 2 ** (abs(e) % 4 * es)
|
||||
e = abs(e) // 4 * es
|
||||
|
||||
while True:
|
||||
if int(m) != m:
|
||||
m *= encbase
|
||||
e -= 1
|
||||
continue
|
||||
break
|
||||
|
||||
return ms, int(m), encbase, e
|
||||
|
||||
def _chooseEncBase(self, value):
|
||||
m, b, e = value
|
||||
encBase = [2, 8, 16]
|
||||
if value.binEncBase in encBase:
|
||||
return self._dropFloatingPoint(m, value.binEncBase, e)
|
||||
|
||||
elif self.binEncBase in encBase:
|
||||
return self._dropFloatingPoint(m, self.binEncBase, e)
|
||||
|
||||
# auto choosing base 2/8/16
|
||||
mantissa = [m, m, m]
|
||||
exponent = [e, e, e]
|
||||
sign = 1
|
||||
encbase = 2
|
||||
e = float('inf')
|
||||
|
||||
for i in range(3):
|
||||
(sign,
|
||||
mantissa[i],
|
||||
encBase[i],
|
||||
exponent[i]) = self._dropFloatingPoint(mantissa[i], encBase[i], exponent[i])
|
||||
|
||||
if abs(exponent[i]) < abs(e) or (abs(exponent[i]) == abs(e) and mantissa[i] < m):
|
||||
e = exponent[i]
|
||||
m = int(mantissa[i])
|
||||
encbase = encBase[i]
|
||||
|
||||
if LOG:
|
||||
LOG('automatically chosen REAL encoding base %s, sign %s, mantissa %s, '
|
||||
'exponent %s' % (encbase, sign, m, e))
|
||||
|
||||
return sign, m, encbase, e
|
||||
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
if asn1Spec is not None:
|
||||
value = asn1Spec.clone(value)
|
||||
|
||||
if value.isPlusInf:
|
||||
return (0x40,), False, False
|
||||
|
||||
if value.isMinusInf:
|
||||
return (0x41,), False, False
|
||||
|
||||
m, b, e = value
|
||||
|
||||
if not m:
|
||||
return null, False, True
|
||||
|
||||
if b == 10:
|
||||
if LOG:
|
||||
LOG('encoding REAL into character form')
|
||||
|
||||
return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), False, True
|
||||
|
||||
elif b == 2:
|
||||
fo = 0x80 # binary encoding
|
||||
ms, m, encbase, e = self._chooseEncBase(value)
|
||||
|
||||
if ms < 0: # mantissa sign
|
||||
fo |= 0x40 # sign bit
|
||||
|
||||
# exponent & mantissa normalization
|
||||
if encbase == 2:
|
||||
while m & 0x1 == 0:
|
||||
m >>= 1
|
||||
e += 1
|
||||
|
||||
elif encbase == 8:
|
||||
while m & 0x7 == 0:
|
||||
m >>= 3
|
||||
e += 1
|
||||
fo |= 0x10
|
||||
|
||||
else: # encbase = 16
|
||||
while m & 0xf == 0:
|
||||
m >>= 4
|
||||
e += 1
|
||||
fo |= 0x20
|
||||
|
||||
sf = 0 # scale factor
|
||||
|
||||
while m & 0x1 == 0:
|
||||
m >>= 1
|
||||
sf += 1
|
||||
|
||||
if sf > 3:
|
||||
raise error.PyAsn1Error('Scale factor overflow') # bug if raised
|
||||
|
||||
fo |= sf << 2
|
||||
eo = null
|
||||
if e == 0 or e == -1:
|
||||
eo = int2oct(e & 0xff)
|
||||
|
||||
else:
|
||||
while e not in (0, -1):
|
||||
eo = int2oct(e & 0xff) + eo
|
||||
e >>= 8
|
||||
|
||||
if e == 0 and eo and oct2int(eo[0]) & 0x80:
|
||||
eo = int2oct(0) + eo
|
||||
|
||||
if e == -1 and eo and not (oct2int(eo[0]) & 0x80):
|
||||
eo = int2oct(0xff) + eo
|
||||
|
||||
n = len(eo)
|
||||
if n > 0xff:
|
||||
raise error.PyAsn1Error('Real exponent overflow')
|
||||
|
||||
if n == 1:
|
||||
pass
|
||||
|
||||
elif n == 2:
|
||||
fo |= 1
|
||||
|
||||
elif n == 3:
|
||||
fo |= 2
|
||||
|
||||
else:
|
||||
fo |= 3
|
||||
eo = int2oct(n & 0xff) + eo
|
||||
|
||||
po = null
|
||||
|
||||
while m:
|
||||
po = int2oct(m & 0xff) + po
|
||||
m >>= 8
|
||||
|
||||
substrate = int2oct(fo) + eo + po
|
||||
|
||||
return substrate, False, True
|
||||
|
||||
else:
|
||||
raise error.PyAsn1Error('Prohibited Real base %s' % b)
|
||||
|
||||
|
||||
class SequenceEncoder(AbstractItemEncoder):
|
||||
omitEmptyOptionals = False
|
||||
|
||||
# TODO: handling three flavors of input is too much -- split over codecs
|
||||
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
|
||||
substrate = null
|
||||
|
||||
omitEmptyOptionals = options.get(
|
||||
'omitEmptyOptionals', self.omitEmptyOptionals)
|
||||
|
||||
if LOG:
|
||||
LOG('%sencoding empty OPTIONAL components' % (
|
||||
omitEmptyOptionals and 'not ' or ''))
|
||||
|
||||
if asn1Spec is None:
|
||||
# instance of ASN.1 schema
|
||||
inconsistency = value.isInconsistent
|
||||
if inconsistency:
|
||||
raise inconsistency
|
||||
|
||||
namedTypes = value.componentType
|
||||
|
||||
for idx, component in enumerate(value.values()):
|
||||
if namedTypes:
|
||||
namedType = namedTypes[idx]
|
||||
|
||||
if namedType.isOptional and not component.isValue:
|
||||
if LOG:
|
||||
LOG('not encoding OPTIONAL component %r' % (namedType,))
|
||||
continue
|
||||
|
||||
if namedType.isDefaulted and component == namedType.asn1Object:
|
||||
if LOG:
|
||||
LOG('not encoding DEFAULT component %r' % (namedType,))
|
||||
continue
|
||||
|
||||
if omitEmptyOptionals:
|
||||
options.update(ifNotEmpty=namedType.isOptional)
|
||||
|
||||
# wrap open type blob if needed
|
||||
if namedTypes and namedType.openType:
|
||||
|
||||
wrapType = namedType.asn1Object
|
||||
|
||||
if wrapType.typeId in (
|
||||
univ.SetOf.typeId, univ.SequenceOf.typeId):
|
||||
|
||||
substrate += encodeFun(
|
||||
component, asn1Spec,
|
||||
**dict(options, wrapType=wrapType.componentType))
|
||||
|
||||
else:
|
||||
chunk = encodeFun(component, asn1Spec, **options)
|
||||
|
||||
if wrapType.isSameTypeWith(component):
|
||||
substrate += chunk
|
||||
|
||||
else:
|
||||
substrate += encodeFun(chunk, wrapType, **options)
|
||||
|
||||
if LOG:
|
||||
LOG('wrapped with wrap type %r' % (wrapType,))
|
||||
|
||||
else:
|
||||
substrate += encodeFun(component, asn1Spec, **options)
|
||||
|
||||
else:
|
||||
# bare Python value + ASN.1 schema
|
||||
for idx, namedType in enumerate(asn1Spec.componentType.namedTypes):
|
||||
|
||||
try:
|
||||
component = value[namedType.name]
|
||||
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('Component name "%s" not found in %r' % (
|
||||
namedType.name, value))
|
||||
|
||||
if namedType.isOptional and namedType.name not in value:
|
||||
if LOG:
|
||||
LOG('not encoding OPTIONAL component %r' % (namedType,))
|
||||
continue
|
||||
|
||||
if namedType.isDefaulted and component == namedType.asn1Object:
|
||||
if LOG:
|
||||
LOG('not encoding DEFAULT component %r' % (namedType,))
|
||||
continue
|
||||
|
||||
if omitEmptyOptionals:
|
||||
options.update(ifNotEmpty=namedType.isOptional)
|
||||
|
||||
componentSpec = namedType.asn1Object
|
||||
|
||||
# wrap open type blob if needed
|
||||
if namedType.openType:
|
||||
|
||||
if componentSpec.typeId in (
|
||||
univ.SetOf.typeId, univ.SequenceOf.typeId):
|
||||
|
||||
substrate += encodeFun(
|
||||
component, componentSpec,
|
||||
**dict(options, wrapType=componentSpec.componentType))
|
||||
|
||||
else:
|
||||
chunk = encodeFun(component, componentSpec, **options)
|
||||
|
||||
if componentSpec.isSameTypeWith(component):
|
||||
substrate += chunk
|
||||
|
||||
else:
|
||||
substrate += encodeFun(chunk, componentSpec, **options)
|
||||
|
||||
if LOG:
|
||||
LOG('wrapped with wrap type %r' % (componentSpec,))
|
||||
|
||||
else:
|
||||
substrate += encodeFun(component, componentSpec, **options)
|
||||
|
||||
return substrate, True, True
|
||||
|
||||
|
||||
class SequenceOfEncoder(AbstractItemEncoder):
|
||||
def _encodeComponents(self, value, asn1Spec, encodeFun, **options):
|
||||
|
||||
if asn1Spec is None:
|
||||
inconsistency = value.isInconsistent
|
||||
if inconsistency:
|
||||
raise inconsistency
|
||||
|
||||
else:
|
||||
asn1Spec = asn1Spec.componentType
|
||||
|
||||
chunks = []
|
||||
|
||||
wrapType = options.pop('wrapType', None)
|
||||
|
||||
for idx, component in enumerate(value):
|
||||
chunk = encodeFun(component, asn1Spec, **options)
|
||||
|
||||
if (wrapType is not None and
|
||||
not wrapType.isSameTypeWith(component)):
|
||||
# wrap encoded value with wrapper container (e.g. ANY)
|
||||
chunk = encodeFun(chunk, wrapType, **options)
|
||||
|
||||
if LOG:
|
||||
LOG('wrapped with wrap type %r' % (wrapType,))
|
||||
|
||||
chunks.append(chunk)
|
||||
|
||||
return chunks
|
||||
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
chunks = self._encodeComponents(
|
||||
value, asn1Spec, encodeFun, **options)
|
||||
|
||||
return null.join(chunks), True, True
|
||||
|
||||
|
||||
class ChoiceEncoder(AbstractItemEncoder):
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
if asn1Spec is None:
|
||||
component = value.getComponent()
|
||||
else:
|
||||
names = [namedType.name for namedType in asn1Spec.componentType.namedTypes
|
||||
if namedType.name in value]
|
||||
if len(names) != 1:
|
||||
raise error.PyAsn1Error('%s components for Choice at %r' % (len(names) and 'Multiple ' or 'None ', value))
|
||||
|
||||
name = names[0]
|
||||
|
||||
component = value[name]
|
||||
asn1Spec = asn1Spec[name]
|
||||
|
||||
return encodeFun(component, asn1Spec, **options), True, True
|
||||
|
||||
|
||||
class AnyEncoder(OctetStringEncoder):
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
if asn1Spec is None:
|
||||
value = value.asOctets()
|
||||
elif not isOctetsType(value):
|
||||
value = asn1Spec.clone(value).asOctets()
|
||||
|
||||
return value, not options.get('defMode', True), True
|
||||
|
||||
|
||||
TAG_MAP = {
|
||||
eoo.endOfOctets.tagSet: EndOfOctetsEncoder(),
|
||||
univ.Boolean.tagSet: BooleanEncoder(),
|
||||
univ.Integer.tagSet: IntegerEncoder(),
|
||||
univ.BitString.tagSet: BitStringEncoder(),
|
||||
univ.OctetString.tagSet: OctetStringEncoder(),
|
||||
univ.Null.tagSet: NullEncoder(),
|
||||
univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(),
|
||||
univ.Enumerated.tagSet: IntegerEncoder(),
|
||||
univ.Real.tagSet: RealEncoder(),
|
||||
# Sequence & Set have same tags as SequenceOf & SetOf
|
||||
univ.SequenceOf.tagSet: SequenceOfEncoder(),
|
||||
univ.SetOf.tagSet: SequenceOfEncoder(),
|
||||
univ.Choice.tagSet: ChoiceEncoder(),
|
||||
# character string types
|
||||
char.UTF8String.tagSet: OctetStringEncoder(),
|
||||
char.NumericString.tagSet: OctetStringEncoder(),
|
||||
char.PrintableString.tagSet: OctetStringEncoder(),
|
||||
char.TeletexString.tagSet: OctetStringEncoder(),
|
||||
char.VideotexString.tagSet: OctetStringEncoder(),
|
||||
char.IA5String.tagSet: OctetStringEncoder(),
|
||||
char.GraphicString.tagSet: OctetStringEncoder(),
|
||||
char.VisibleString.tagSet: OctetStringEncoder(),
|
||||
char.GeneralString.tagSet: OctetStringEncoder(),
|
||||
char.UniversalString.tagSet: OctetStringEncoder(),
|
||||
char.BMPString.tagSet: OctetStringEncoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
|
||||
useful.GeneralizedTime.tagSet: OctetStringEncoder(),
|
||||
useful.UTCTime.tagSet: OctetStringEncoder()
|
||||
}
|
||||
|
||||
# Put in ambiguous & non-ambiguous types for faster codec lookup
|
||||
TYPE_MAP = {
|
||||
univ.Boolean.typeId: BooleanEncoder(),
|
||||
univ.Integer.typeId: IntegerEncoder(),
|
||||
univ.BitString.typeId: BitStringEncoder(),
|
||||
univ.OctetString.typeId: OctetStringEncoder(),
|
||||
univ.Null.typeId: NullEncoder(),
|
||||
univ.ObjectIdentifier.typeId: ObjectIdentifierEncoder(),
|
||||
univ.Enumerated.typeId: IntegerEncoder(),
|
||||
univ.Real.typeId: RealEncoder(),
|
||||
# Sequence & Set have same tags as SequenceOf & SetOf
|
||||
univ.Set.typeId: SequenceEncoder(),
|
||||
univ.SetOf.typeId: SequenceOfEncoder(),
|
||||
univ.Sequence.typeId: SequenceEncoder(),
|
||||
univ.SequenceOf.typeId: SequenceOfEncoder(),
|
||||
univ.Choice.typeId: ChoiceEncoder(),
|
||||
univ.Any.typeId: AnyEncoder(),
|
||||
# character string types
|
||||
char.UTF8String.typeId: OctetStringEncoder(),
|
||||
char.NumericString.typeId: OctetStringEncoder(),
|
||||
char.PrintableString.typeId: OctetStringEncoder(),
|
||||
char.TeletexString.typeId: OctetStringEncoder(),
|
||||
char.VideotexString.typeId: OctetStringEncoder(),
|
||||
char.IA5String.typeId: OctetStringEncoder(),
|
||||
char.GraphicString.typeId: OctetStringEncoder(),
|
||||
char.VisibleString.typeId: OctetStringEncoder(),
|
||||
char.GeneralString.typeId: OctetStringEncoder(),
|
||||
char.UniversalString.typeId: OctetStringEncoder(),
|
||||
char.BMPString.typeId: OctetStringEncoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.typeId: OctetStringEncoder(),
|
||||
useful.GeneralizedTime.typeId: OctetStringEncoder(),
|
||||
useful.UTCTime.typeId: OctetStringEncoder()
|
||||
}
|
||||
|
||||
|
||||
class SingleItemEncoder(object):
|
||||
fixedDefLengthMode = None
|
||||
fixedChunkSize = None
|
||||
|
||||
TAG_MAP = TAG_MAP
|
||||
TYPE_MAP = TYPE_MAP
|
||||
|
||||
def __init__(self, **options):
|
||||
self._tagMap = options.get('tagMap', self.TAG_MAP)
|
||||
self._typeMap = options.get('typeMap', self.TYPE_MAP)
|
||||
|
||||
def __call__(self, value, asn1Spec=None, **options):
|
||||
try:
|
||||
if asn1Spec is None:
|
||||
typeId = value.typeId
|
||||
else:
|
||||
typeId = asn1Spec.typeId
|
||||
|
||||
except AttributeError:
|
||||
raise error.PyAsn1Error('Value %r is not ASN.1 type instance '
|
||||
'and "asn1Spec" not given' % (value,))
|
||||
|
||||
if LOG:
|
||||
LOG('encoder called in %sdef mode, chunk size %s for type %s, '
|
||||
'value:\n%s' % (not options.get('defMode', True) and 'in' or '',
|
||||
options.get('maxChunkSize', 0),
|
||||
asn1Spec is None and value.prettyPrintType() or
|
||||
asn1Spec.prettyPrintType(), value))
|
||||
|
||||
if self.fixedDefLengthMode is not None:
|
||||
options.update(defMode=self.fixedDefLengthMode)
|
||||
|
||||
if self.fixedChunkSize is not None:
|
||||
options.update(maxChunkSize=self.fixedChunkSize)
|
||||
|
||||
try:
|
||||
concreteEncoder = self._typeMap[typeId]
|
||||
|
||||
if LOG:
|
||||
LOG('using value codec %s chosen by type ID '
|
||||
'%s' % (concreteEncoder.__class__.__name__, typeId))
|
||||
|
||||
except KeyError:
|
||||
if asn1Spec is None:
|
||||
tagSet = value.tagSet
|
||||
else:
|
||||
tagSet = asn1Spec.tagSet
|
||||
|
||||
# use base type for codec lookup to recover untagged types
|
||||
baseTagSet = tag.TagSet(tagSet.baseTag, tagSet.baseTag)
|
||||
|
||||
try:
|
||||
concreteEncoder = self._tagMap[baseTagSet]
|
||||
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('No encoder for %r (%s)' % (value, tagSet))
|
||||
|
||||
if LOG:
|
||||
LOG('using value codec %s chosen by tagSet '
|
||||
'%s' % (concreteEncoder.__class__.__name__, tagSet))
|
||||
|
||||
substrate = concreteEncoder.encode(value, asn1Spec, self, **options)
|
||||
|
||||
if LOG:
|
||||
LOG('codec %s built %s octets of substrate: %s\nencoder '
|
||||
'completed' % (concreteEncoder, len(substrate),
|
||||
debug.hexdump(substrate)))
|
||||
|
||||
return substrate
|
||||
|
||||
|
||||
class Encoder(object):
|
||||
SINGLE_ITEM_ENCODER = SingleItemEncoder
|
||||
|
||||
def __init__(self, **options):
|
||||
self._singleItemEncoder = self.SINGLE_ITEM_ENCODER(**options)
|
||||
|
||||
def __call__(self, pyObject, asn1Spec=None, **options):
|
||||
return self._singleItemEncoder(
|
||||
pyObject, asn1Spec=asn1Spec, **options)
|
||||
|
||||
|
||||
#: Turns ASN.1 object into BER octet stream.
|
||||
#:
|
||||
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: walks all its components recursively and produces a BER octet stream.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: value: either a Python or pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: A Python or pyasn1 object to encode. If Python object is given, `asnSpec`
|
||||
#: parameter is required to guide the encoding process.
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec:
|
||||
#: Optional ASN.1 schema or value object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#:
|
||||
#: defMode: :py:class:`bool`
|
||||
#: If :obj:`False`, produces indefinite length encoding
|
||||
#:
|
||||
#: maxChunkSize: :py:class:`int`
|
||||
#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size)
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
|
||||
#: Given ASN.1 object encoded into BER octetstream
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error
|
||||
#: On encoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Encode Python value into BER with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> encode([1, 2, 3], asn1Spec=seq)
|
||||
#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
|
||||
#:
|
||||
#: Encode ASN.1 value object into BER
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> seq.extend([1, 2, 3])
|
||||
#: >>> encode(seq)
|
||||
#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
|
||||
#:
|
||||
encode = Encoder()
|
||||
28
lib/pyasn1/codec/ber/eoo.py
Normal file
28
lib/pyasn1/codec/ber/eoo.py
Normal file
@@ -0,0 +1,28 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1.type import base
|
||||
from pyasn1.type import tag
|
||||
|
||||
__all__ = ['endOfOctets']
|
||||
|
||||
|
||||
class EndOfOctets(base.SimpleAsn1Type):
|
||||
defaultValue = 0
|
||||
tagSet = tag.initTagSet(
|
||||
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00)
|
||||
)
|
||||
|
||||
_instance = None
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
if cls._instance is None:
|
||||
cls._instance = object.__new__(cls, *args, **kwargs)
|
||||
|
||||
return cls._instance
|
||||
|
||||
|
||||
endOfOctets = EndOfOctets()
|
||||
142
lib/pyasn1/codec/cer/decoder.py
Normal file
142
lib/pyasn1/codec/cer/decoder.py
Normal file
@@ -0,0 +1,142 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import error
|
||||
from pyasn1.codec.streaming import readFromStream
|
||||
from pyasn1.codec.ber import decoder
|
||||
from pyasn1.compat.octets import oct2int
|
||||
from pyasn1.type import univ
|
||||
|
||||
__all__ = ['decode', 'StreamingDecoder']
|
||||
|
||||
SubstrateUnderrunError = error.SubstrateUnderrunError
|
||||
|
||||
|
||||
class BooleanPayloadDecoder(decoder.AbstractSimplePayloadDecoder):
|
||||
protoComponent = univ.Boolean(0)
|
||||
|
||||
def valueDecoder(self, substrate, asn1Spec,
|
||||
tagSet=None, length=None, state=None,
|
||||
decodeFun=None, substrateFun=None,
|
||||
**options):
|
||||
|
||||
if length != 1:
|
||||
raise error.PyAsn1Error('Not single-octet Boolean payload')
|
||||
|
||||
for chunk in readFromStream(substrate, length, options):
|
||||
if isinstance(chunk, SubstrateUnderrunError):
|
||||
yield chunk
|
||||
|
||||
byte = oct2int(chunk[0])
|
||||
|
||||
# CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while
|
||||
# BER allows any non-zero value as TRUE; cf. sections 8.2.2. and 11.1
|
||||
# in https://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
|
||||
if byte == 0xff:
|
||||
value = 1
|
||||
|
||||
elif byte == 0x00:
|
||||
value = 0
|
||||
|
||||
else:
|
||||
raise error.PyAsn1Error('Unexpected Boolean payload: %s' % byte)
|
||||
|
||||
yield self._createComponent(asn1Spec, tagSet, value, **options)
|
||||
|
||||
|
||||
# TODO: prohibit non-canonical encoding
|
||||
BitStringPayloadDecoder = decoder.BitStringPayloadDecoder
|
||||
OctetStringPayloadDecoder = decoder.OctetStringPayloadDecoder
|
||||
RealPayloadDecoder = decoder.RealPayloadDecoder
|
||||
|
||||
TAG_MAP = decoder.TAG_MAP.copy()
|
||||
TAG_MAP.update(
|
||||
{univ.Boolean.tagSet: BooleanPayloadDecoder(),
|
||||
univ.BitString.tagSet: BitStringPayloadDecoder(),
|
||||
univ.OctetString.tagSet: OctetStringPayloadDecoder(),
|
||||
univ.Real.tagSet: RealPayloadDecoder()}
|
||||
)
|
||||
|
||||
TYPE_MAP = decoder.TYPE_MAP.copy()
|
||||
|
||||
# Put in non-ambiguous types for faster codec lookup
|
||||
for typeDecoder in TAG_MAP.values():
|
||||
if typeDecoder.protoComponent is not None:
|
||||
typeId = typeDecoder.protoComponent.__class__.typeId
|
||||
if typeId is not None and typeId not in TYPE_MAP:
|
||||
TYPE_MAP[typeId] = typeDecoder
|
||||
|
||||
|
||||
class SingleItemDecoder(decoder.SingleItemDecoder):
|
||||
__doc__ = decoder.SingleItemDecoder.__doc__
|
||||
|
||||
TAG_MAP = TAG_MAP
|
||||
TYPE_MAP = TYPE_MAP
|
||||
|
||||
|
||||
class StreamingDecoder(decoder.StreamingDecoder):
|
||||
__doc__ = decoder.StreamingDecoder.__doc__
|
||||
|
||||
SINGLE_ITEM_DECODER = SingleItemDecoder
|
||||
|
||||
|
||||
class Decoder(decoder.Decoder):
|
||||
__doc__ = decoder.Decoder.__doc__
|
||||
|
||||
STREAMING_DECODER = StreamingDecoder
|
||||
|
||||
|
||||
#: Turns CER octet stream into an ASN.1 object.
|
||||
#:
|
||||
#: Takes CER octet-stream and decode it into an ASN.1 object
|
||||
#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
|
||||
#: may be a scalar or an arbitrary nested structure.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
|
||||
#: CER octet-stream
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure
|
||||
#: being decoded, *asn1Spec* may or may not be required. Most common reason for
|
||||
#: it to require is that ASN.1 structure is encoded in *IMPLICIT* tagging mode.
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`tuple`
|
||||
#: A tuple of pyasn1 object recovered from CER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: and the unprocessed trailing portion of the *substrate* (may be empty)
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
|
||||
#: On decoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Decode CER serialisation without ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> s, _ = decode(b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00')
|
||||
#: >>> str(s)
|
||||
#: SequenceOf:
|
||||
#: 1 2 3
|
||||
#:
|
||||
#: Decode CER serialisation with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> s, _ = decode(b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00', asn1Spec=seq)
|
||||
#: >>> str(s)
|
||||
#: SequenceOf:
|
||||
#: 1 2 3
|
||||
#:
|
||||
decode = Decoder()
|
||||
323
lib/pyasn1/codec/cer/encoder.py
Normal file
323
lib/pyasn1/codec/cer/encoder.py
Normal file
@@ -0,0 +1,323 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import error
|
||||
from pyasn1.codec.ber import encoder
|
||||
from pyasn1.compat.octets import str2octs, null
|
||||
from pyasn1.type import univ
|
||||
from pyasn1.type import useful
|
||||
|
||||
__all__ = ['Encoder', 'encode']
|
||||
|
||||
|
||||
class BooleanEncoder(encoder.IntegerEncoder):
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
if value == 0:
|
||||
substrate = (0,)
|
||||
else:
|
||||
substrate = (255,)
|
||||
return substrate, False, False
|
||||
|
||||
|
||||
class RealEncoder(encoder.RealEncoder):
|
||||
def _chooseEncBase(self, value):
|
||||
m, b, e = value
|
||||
return self._dropFloatingPoint(m, b, e)
|
||||
|
||||
|
||||
# specialized GeneralStringEncoder here
|
||||
|
||||
class TimeEncoderMixIn(object):
|
||||
Z_CHAR = ord('Z')
|
||||
PLUS_CHAR = ord('+')
|
||||
MINUS_CHAR = ord('-')
|
||||
COMMA_CHAR = ord(',')
|
||||
DOT_CHAR = ord('.')
|
||||
ZERO_CHAR = ord('0')
|
||||
|
||||
MIN_LENGTH = 12
|
||||
MAX_LENGTH = 19
|
||||
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
# CER encoding constraints:
|
||||
# - minutes are mandatory, seconds are optional
|
||||
# - sub-seconds must NOT be zero / no meaningless zeros
|
||||
# - no hanging fraction dot
|
||||
# - time in UTC (Z)
|
||||
# - only dot is allowed for fractions
|
||||
|
||||
if asn1Spec is not None:
|
||||
value = asn1Spec.clone(value)
|
||||
|
||||
numbers = value.asNumbers()
|
||||
|
||||
if self.PLUS_CHAR in numbers or self.MINUS_CHAR in numbers:
|
||||
raise error.PyAsn1Error('Must be UTC time: %r' % value)
|
||||
|
||||
if numbers[-1] != self.Z_CHAR:
|
||||
raise error.PyAsn1Error('Missing "Z" time zone specifier: %r' % value)
|
||||
|
||||
if self.COMMA_CHAR in numbers:
|
||||
raise error.PyAsn1Error('Comma in fractions disallowed: %r' % value)
|
||||
|
||||
if self.DOT_CHAR in numbers:
|
||||
|
||||
isModified = False
|
||||
|
||||
numbers = list(numbers)
|
||||
|
||||
searchIndex = min(numbers.index(self.DOT_CHAR) + 4, len(numbers) - 1)
|
||||
|
||||
while numbers[searchIndex] != self.DOT_CHAR:
|
||||
if numbers[searchIndex] == self.ZERO_CHAR:
|
||||
del numbers[searchIndex]
|
||||
isModified = True
|
||||
|
||||
searchIndex -= 1
|
||||
|
||||
searchIndex += 1
|
||||
|
||||
if searchIndex < len(numbers):
|
||||
if numbers[searchIndex] == self.Z_CHAR:
|
||||
# drop hanging comma
|
||||
del numbers[searchIndex - 1]
|
||||
isModified = True
|
||||
|
||||
if isModified:
|
||||
value = value.clone(numbers)
|
||||
|
||||
if not self.MIN_LENGTH < len(numbers) < self.MAX_LENGTH:
|
||||
raise error.PyAsn1Error('Length constraint violated: %r' % value)
|
||||
|
||||
options.update(maxChunkSize=1000)
|
||||
|
||||
return encoder.OctetStringEncoder.encodeValue(
|
||||
self, value, asn1Spec, encodeFun, **options
|
||||
)
|
||||
|
||||
|
||||
class GeneralizedTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder):
|
||||
MIN_LENGTH = 12
|
||||
MAX_LENGTH = 20
|
||||
|
||||
|
||||
class UTCTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder):
|
||||
MIN_LENGTH = 10
|
||||
MAX_LENGTH = 14
|
||||
|
||||
|
||||
class SetOfEncoder(encoder.SequenceOfEncoder):
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
chunks = self._encodeComponents(
|
||||
value, asn1Spec, encodeFun, **options)
|
||||
|
||||
# sort by serialised and padded components
|
||||
if len(chunks) > 1:
|
||||
zero = str2octs('\x00')
|
||||
maxLen = max(map(len, chunks))
|
||||
paddedChunks = [
|
||||
(x.ljust(maxLen, zero), x) for x in chunks
|
||||
]
|
||||
paddedChunks.sort(key=lambda x: x[0])
|
||||
|
||||
chunks = [x[1] for x in paddedChunks]
|
||||
|
||||
return null.join(chunks), True, True
|
||||
|
||||
|
||||
class SequenceOfEncoder(encoder.SequenceOfEncoder):
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
|
||||
if options.get('ifNotEmpty', False) and not len(value):
|
||||
return null, True, True
|
||||
|
||||
chunks = self._encodeComponents(
|
||||
value, asn1Spec, encodeFun, **options)
|
||||
|
||||
return null.join(chunks), True, True
|
||||
|
||||
|
||||
class SetEncoder(encoder.SequenceEncoder):
|
||||
@staticmethod
|
||||
def _componentSortKey(componentAndType):
|
||||
"""Sort SET components by tag
|
||||
|
||||
Sort regardless of the Choice value (static sort)
|
||||
"""
|
||||
component, asn1Spec = componentAndType
|
||||
|
||||
if asn1Spec is None:
|
||||
asn1Spec = component
|
||||
|
||||
if asn1Spec.typeId == univ.Choice.typeId and not asn1Spec.tagSet:
|
||||
if asn1Spec.tagSet:
|
||||
return asn1Spec.tagSet
|
||||
else:
|
||||
return asn1Spec.componentType.minTagSet
|
||||
else:
|
||||
return asn1Spec.tagSet
|
||||
|
||||
def encodeValue(self, value, asn1Spec, encodeFun, **options):
|
||||
|
||||
substrate = null
|
||||
|
||||
comps = []
|
||||
compsMap = {}
|
||||
|
||||
if asn1Spec is None:
|
||||
# instance of ASN.1 schema
|
||||
inconsistency = value.isInconsistent
|
||||
if inconsistency:
|
||||
raise inconsistency
|
||||
|
||||
namedTypes = value.componentType
|
||||
|
||||
for idx, component in enumerate(value.values()):
|
||||
if namedTypes:
|
||||
namedType = namedTypes[idx]
|
||||
|
||||
if namedType.isOptional and not component.isValue:
|
||||
continue
|
||||
|
||||
if namedType.isDefaulted and component == namedType.asn1Object:
|
||||
continue
|
||||
|
||||
compsMap[id(component)] = namedType
|
||||
|
||||
else:
|
||||
compsMap[id(component)] = None
|
||||
|
||||
comps.append((component, asn1Spec))
|
||||
|
||||
else:
|
||||
# bare Python value + ASN.1 schema
|
||||
for idx, namedType in enumerate(asn1Spec.componentType.namedTypes):
|
||||
|
||||
try:
|
||||
component = value[namedType.name]
|
||||
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('Component name "%s" not found in %r' % (namedType.name, value))
|
||||
|
||||
if namedType.isOptional and namedType.name not in value:
|
||||
continue
|
||||
|
||||
if namedType.isDefaulted and component == namedType.asn1Object:
|
||||
continue
|
||||
|
||||
compsMap[id(component)] = namedType
|
||||
comps.append((component, asn1Spec[idx]))
|
||||
|
||||
for comp, compType in sorted(comps, key=self._componentSortKey):
|
||||
namedType = compsMap[id(comp)]
|
||||
|
||||
if namedType:
|
||||
options.update(ifNotEmpty=namedType.isOptional)
|
||||
|
||||
chunk = encodeFun(comp, compType, **options)
|
||||
|
||||
# wrap open type blob if needed
|
||||
if namedType and namedType.openType:
|
||||
wrapType = namedType.asn1Object
|
||||
if wrapType.tagSet and not wrapType.isSameTypeWith(comp):
|
||||
chunk = encodeFun(chunk, wrapType, **options)
|
||||
|
||||
substrate += chunk
|
||||
|
||||
return substrate, True, True
|
||||
|
||||
|
||||
class SequenceEncoder(encoder.SequenceEncoder):
|
||||
omitEmptyOptionals = True
|
||||
|
||||
|
||||
TAG_MAP = encoder.TAG_MAP.copy()
|
||||
|
||||
TAG_MAP.update({
|
||||
univ.Boolean.tagSet: BooleanEncoder(),
|
||||
univ.Real.tagSet: RealEncoder(),
|
||||
useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
|
||||
useful.UTCTime.tagSet: UTCTimeEncoder(),
|
||||
# Sequence & Set have same tags as SequenceOf & SetOf
|
||||
univ.SetOf.tagSet: SetOfEncoder(),
|
||||
univ.Sequence.typeId: SequenceEncoder()
|
||||
})
|
||||
|
||||
TYPE_MAP = encoder.TYPE_MAP.copy()
|
||||
|
||||
TYPE_MAP.update({
|
||||
univ.Boolean.typeId: BooleanEncoder(),
|
||||
univ.Real.typeId: RealEncoder(),
|
||||
useful.GeneralizedTime.typeId: GeneralizedTimeEncoder(),
|
||||
useful.UTCTime.typeId: UTCTimeEncoder(),
|
||||
# Sequence & Set have same tags as SequenceOf & SetOf
|
||||
univ.Set.typeId: SetEncoder(),
|
||||
univ.SetOf.typeId: SetOfEncoder(),
|
||||
univ.Sequence.typeId: SequenceEncoder(),
|
||||
univ.SequenceOf.typeId: SequenceOfEncoder()
|
||||
})
|
||||
|
||||
|
||||
class SingleItemEncoder(encoder.SingleItemEncoder):
|
||||
fixedDefLengthMode = False
|
||||
fixedChunkSize = 1000
|
||||
|
||||
TAG_MAP = TAG_MAP
|
||||
TYPE_MAP = TYPE_MAP
|
||||
|
||||
|
||||
class Encoder(encoder.Encoder):
|
||||
SINGLE_ITEM_ENCODER = SingleItemEncoder
|
||||
|
||||
|
||||
#: Turns ASN.1 object into CER octet stream.
|
||||
#:
|
||||
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: walks all its components recursively and produces a CER octet stream.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: value: either a Python or pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: A Python or pyasn1 object to encode. If Python object is given, `asnSpec`
|
||||
#: parameter is required to guide the encoding process.
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec:
|
||||
#: Optional ASN.1 schema or value object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
|
||||
#: Given ASN.1 object encoded into BER octet-stream
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error
|
||||
#: On encoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Encode Python value into CER with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> encode([1, 2, 3], asn1Spec=seq)
|
||||
#: b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00'
|
||||
#:
|
||||
#: Encode ASN.1 value object into CER
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> seq.extend([1, 2, 3])
|
||||
#: >>> encode(seq)
|
||||
#: b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00'
|
||||
#:
|
||||
encode = Encoder()
|
||||
|
||||
# EncoderFactory queries class instance and builds a map of tags -> encoders
|
||||
112
lib/pyasn1/codec/der/decoder.py
Normal file
112
lib/pyasn1/codec/der/decoder.py
Normal file
@@ -0,0 +1,112 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1.codec.cer import decoder
|
||||
from pyasn1.type import univ
|
||||
|
||||
__all__ = ['decode', 'StreamingDecoder']
|
||||
|
||||
|
||||
class BitStringPayloadDecoder(decoder.BitStringPayloadDecoder):
|
||||
supportConstructedForm = False
|
||||
|
||||
|
||||
class OctetStringPayloadDecoder(decoder.OctetStringPayloadDecoder):
|
||||
supportConstructedForm = False
|
||||
|
||||
|
||||
# TODO: prohibit non-canonical encoding
|
||||
RealPayloadDecoder = decoder.RealPayloadDecoder
|
||||
|
||||
TAG_MAP = decoder.TAG_MAP.copy()
|
||||
TAG_MAP.update(
|
||||
{univ.BitString.tagSet: BitStringPayloadDecoder(),
|
||||
univ.OctetString.tagSet: OctetStringPayloadDecoder(),
|
||||
univ.Real.tagSet: RealPayloadDecoder()}
|
||||
)
|
||||
|
||||
TYPE_MAP = decoder.TYPE_MAP.copy()
|
||||
|
||||
# Put in non-ambiguous types for faster codec lookup
|
||||
for typeDecoder in TAG_MAP.values():
|
||||
if typeDecoder.protoComponent is not None:
|
||||
typeId = typeDecoder.protoComponent.__class__.typeId
|
||||
if typeId is not None and typeId not in TYPE_MAP:
|
||||
TYPE_MAP[typeId] = typeDecoder
|
||||
|
||||
|
||||
class SingleItemDecoder(decoder.SingleItemDecoder):
|
||||
__doc__ = decoder.SingleItemDecoder.__doc__
|
||||
|
||||
TAG_MAP = TAG_MAP
|
||||
TYPE_MAP = TYPE_MAP
|
||||
|
||||
supportIndefLength = False
|
||||
|
||||
|
||||
class StreamingDecoder(decoder.StreamingDecoder):
|
||||
__doc__ = decoder.StreamingDecoder.__doc__
|
||||
|
||||
SINGLE_ITEM_DECODER = SingleItemDecoder
|
||||
|
||||
|
||||
class Decoder(decoder.Decoder):
|
||||
__doc__ = decoder.Decoder.__doc__
|
||||
|
||||
STREAMING_DECODER = StreamingDecoder
|
||||
|
||||
|
||||
#: Turns DER octet stream into an ASN.1 object.
|
||||
#:
|
||||
#: Takes DER octet-stream and decode it into an ASN.1 object
|
||||
#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
|
||||
#: may be a scalar or an arbitrary nested structure.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
|
||||
#: DER octet-stream
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure
|
||||
#: being decoded, *asn1Spec* may or may not be required. Most common reason for
|
||||
#: it to require is that ASN.1 structure is encoded in *IMPLICIT* tagging mode.
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`tuple`
|
||||
#: A tuple of pyasn1 object recovered from DER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: and the unprocessed trailing portion of the *substrate* (may be empty)
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
|
||||
#: On decoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Decode DER serialisation without ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03')
|
||||
#: >>> str(s)
|
||||
#: SequenceOf:
|
||||
#: 1 2 3
|
||||
#:
|
||||
#: Decode DER serialisation with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03', asn1Spec=seq)
|
||||
#: >>> str(s)
|
||||
#: SequenceOf:
|
||||
#: 1 2 3
|
||||
#:
|
||||
decode = Decoder()
|
||||
118
lib/pyasn1/codec/der/encoder.py
Normal file
118
lib/pyasn1/codec/der/encoder.py
Normal file
@@ -0,0 +1,118 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import error
|
||||
from pyasn1.codec.cer import encoder
|
||||
from pyasn1.type import univ
|
||||
|
||||
__all__ = ['Encoder', 'encode']
|
||||
|
||||
|
||||
class SetEncoder(encoder.SetEncoder):
|
||||
@staticmethod
|
||||
def _componentSortKey(componentAndType):
|
||||
"""Sort SET components by tag
|
||||
|
||||
Sort depending on the actual Choice value (dynamic sort)
|
||||
"""
|
||||
component, asn1Spec = componentAndType
|
||||
|
||||
if asn1Spec is None:
|
||||
compType = component
|
||||
else:
|
||||
compType = asn1Spec
|
||||
|
||||
if compType.typeId == univ.Choice.typeId and not compType.tagSet:
|
||||
if asn1Spec is None:
|
||||
return component.getComponent().tagSet
|
||||
else:
|
||||
# TODO: move out of sorting key function
|
||||
names = [namedType.name for namedType in asn1Spec.componentType.namedTypes
|
||||
if namedType.name in component]
|
||||
if len(names) != 1:
|
||||
raise error.PyAsn1Error(
|
||||
'%s components for Choice at %r' % (len(names) and 'Multiple ' or 'None ', component))
|
||||
|
||||
# TODO: support nested CHOICE ordering
|
||||
return asn1Spec[names[0]].tagSet
|
||||
|
||||
else:
|
||||
return compType.tagSet
|
||||
|
||||
|
||||
TAG_MAP = encoder.TAG_MAP.copy()
|
||||
|
||||
TAG_MAP.update({
|
||||
# Set & SetOf have same tags
|
||||
univ.Set.tagSet: SetEncoder()
|
||||
})
|
||||
|
||||
TYPE_MAP = encoder.TYPE_MAP.copy()
|
||||
|
||||
TYPE_MAP.update({
|
||||
# Set & SetOf have same tags
|
||||
univ.Set.typeId: SetEncoder()
|
||||
})
|
||||
|
||||
|
||||
class SingleItemEncoder(encoder.SingleItemEncoder):
|
||||
fixedDefLengthMode = True
|
||||
fixedChunkSize = 0
|
||||
|
||||
TAG_MAP = TAG_MAP
|
||||
TYPE_MAP = TYPE_MAP
|
||||
|
||||
|
||||
class Encoder(encoder.Encoder):
|
||||
SINGLE_ITEM_ENCODER = SingleItemEncoder
|
||||
|
||||
|
||||
#: Turns ASN.1 object into DER octet stream.
|
||||
#:
|
||||
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: walks all its components recursively and produces a DER octet stream.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: value: either a Python or pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: A Python or pyasn1 object to encode. If Python object is given, `asnSpec`
|
||||
#: parameter is required to guide the encoding process.
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec:
|
||||
#: Optional ASN.1 schema or value object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
|
||||
#: Given ASN.1 object encoded into BER octet-stream
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error
|
||||
#: On encoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Encode Python value into DER with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> encode([1, 2, 3], asn1Spec=seq)
|
||||
#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
|
||||
#:
|
||||
#: Encode ASN.1 value object into DER
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> seq.extend([1, 2, 3])
|
||||
#: >>> encode(seq)
|
||||
#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
|
||||
#:
|
||||
encode = Encoder()
|
||||
233
lib/pyasn1/codec/native/decoder.py
Normal file
233
lib/pyasn1/codec/native/decoder.py
Normal file
@@ -0,0 +1,233 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import debug
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import base
|
||||
from pyasn1.type import char
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import univ
|
||||
from pyasn1.type import useful
|
||||
|
||||
__all__ = ['decode']
|
||||
|
||||
LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER)
|
||||
|
||||
|
||||
class AbstractScalarPayloadDecoder(object):
|
||||
def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
|
||||
return asn1Spec.clone(pyObject)
|
||||
|
||||
|
||||
class BitStringPayloadDecoder(AbstractScalarPayloadDecoder):
|
||||
def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
|
||||
return asn1Spec.clone(univ.BitString.fromBinaryString(pyObject))
|
||||
|
||||
|
||||
class SequenceOrSetPayloadDecoder(object):
|
||||
def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
|
||||
asn1Value = asn1Spec.clone()
|
||||
|
||||
componentsTypes = asn1Spec.componentType
|
||||
|
||||
for field in asn1Value:
|
||||
if field in pyObject:
|
||||
asn1Value[field] = decodeFun(pyObject[field], componentsTypes[field].asn1Object, **options)
|
||||
|
||||
return asn1Value
|
||||
|
||||
|
||||
class SequenceOfOrSetOfPayloadDecoder(object):
|
||||
def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
|
||||
asn1Value = asn1Spec.clone()
|
||||
|
||||
for pyValue in pyObject:
|
||||
asn1Value.append(decodeFun(pyValue, asn1Spec.componentType), **options)
|
||||
|
||||
return asn1Value
|
||||
|
||||
|
||||
class ChoicePayloadDecoder(object):
|
||||
def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
|
||||
asn1Value = asn1Spec.clone()
|
||||
|
||||
componentsTypes = asn1Spec.componentType
|
||||
|
||||
for field in pyObject:
|
||||
if field in componentsTypes:
|
||||
asn1Value[field] = decodeFun(pyObject[field], componentsTypes[field].asn1Object, **options)
|
||||
break
|
||||
|
||||
return asn1Value
|
||||
|
||||
|
||||
TAG_MAP = {
|
||||
univ.Integer.tagSet: AbstractScalarPayloadDecoder(),
|
||||
univ.Boolean.tagSet: AbstractScalarPayloadDecoder(),
|
||||
univ.BitString.tagSet: BitStringPayloadDecoder(),
|
||||
univ.OctetString.tagSet: AbstractScalarPayloadDecoder(),
|
||||
univ.Null.tagSet: AbstractScalarPayloadDecoder(),
|
||||
univ.ObjectIdentifier.tagSet: AbstractScalarPayloadDecoder(),
|
||||
univ.Enumerated.tagSet: AbstractScalarPayloadDecoder(),
|
||||
univ.Real.tagSet: AbstractScalarPayloadDecoder(),
|
||||
univ.Sequence.tagSet: SequenceOrSetPayloadDecoder(), # conflicts with SequenceOf
|
||||
univ.Set.tagSet: SequenceOrSetPayloadDecoder(), # conflicts with SetOf
|
||||
univ.Choice.tagSet: ChoicePayloadDecoder(), # conflicts with Any
|
||||
# character string types
|
||||
char.UTF8String.tagSet: AbstractScalarPayloadDecoder(),
|
||||
char.NumericString.tagSet: AbstractScalarPayloadDecoder(),
|
||||
char.PrintableString.tagSet: AbstractScalarPayloadDecoder(),
|
||||
char.TeletexString.tagSet: AbstractScalarPayloadDecoder(),
|
||||
char.VideotexString.tagSet: AbstractScalarPayloadDecoder(),
|
||||
char.IA5String.tagSet: AbstractScalarPayloadDecoder(),
|
||||
char.GraphicString.tagSet: AbstractScalarPayloadDecoder(),
|
||||
char.VisibleString.tagSet: AbstractScalarPayloadDecoder(),
|
||||
char.GeneralString.tagSet: AbstractScalarPayloadDecoder(),
|
||||
char.UniversalString.tagSet: AbstractScalarPayloadDecoder(),
|
||||
char.BMPString.tagSet: AbstractScalarPayloadDecoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.tagSet: AbstractScalarPayloadDecoder(),
|
||||
useful.GeneralizedTime.tagSet: AbstractScalarPayloadDecoder(),
|
||||
useful.UTCTime.tagSet: AbstractScalarPayloadDecoder()
|
||||
}
|
||||
|
||||
# Put in ambiguous & non-ambiguous types for faster codec lookup
|
||||
TYPE_MAP = {
|
||||
univ.Integer.typeId: AbstractScalarPayloadDecoder(),
|
||||
univ.Boolean.typeId: AbstractScalarPayloadDecoder(),
|
||||
univ.BitString.typeId: BitStringPayloadDecoder(),
|
||||
univ.OctetString.typeId: AbstractScalarPayloadDecoder(),
|
||||
univ.Null.typeId: AbstractScalarPayloadDecoder(),
|
||||
univ.ObjectIdentifier.typeId: AbstractScalarPayloadDecoder(),
|
||||
univ.Enumerated.typeId: AbstractScalarPayloadDecoder(),
|
||||
univ.Real.typeId: AbstractScalarPayloadDecoder(),
|
||||
# ambiguous base types
|
||||
univ.Set.typeId: SequenceOrSetPayloadDecoder(),
|
||||
univ.SetOf.typeId: SequenceOfOrSetOfPayloadDecoder(),
|
||||
univ.Sequence.typeId: SequenceOrSetPayloadDecoder(),
|
||||
univ.SequenceOf.typeId: SequenceOfOrSetOfPayloadDecoder(),
|
||||
univ.Choice.typeId: ChoicePayloadDecoder(),
|
||||
univ.Any.typeId: AbstractScalarPayloadDecoder(),
|
||||
# character string types
|
||||
char.UTF8String.typeId: AbstractScalarPayloadDecoder(),
|
||||
char.NumericString.typeId: AbstractScalarPayloadDecoder(),
|
||||
char.PrintableString.typeId: AbstractScalarPayloadDecoder(),
|
||||
char.TeletexString.typeId: AbstractScalarPayloadDecoder(),
|
||||
char.VideotexString.typeId: AbstractScalarPayloadDecoder(),
|
||||
char.IA5String.typeId: AbstractScalarPayloadDecoder(),
|
||||
char.GraphicString.typeId: AbstractScalarPayloadDecoder(),
|
||||
char.VisibleString.typeId: AbstractScalarPayloadDecoder(),
|
||||
char.GeneralString.typeId: AbstractScalarPayloadDecoder(),
|
||||
char.UniversalString.typeId: AbstractScalarPayloadDecoder(),
|
||||
char.BMPString.typeId: AbstractScalarPayloadDecoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.typeId: AbstractScalarPayloadDecoder(),
|
||||
useful.GeneralizedTime.typeId: AbstractScalarPayloadDecoder(),
|
||||
useful.UTCTime.typeId: AbstractScalarPayloadDecoder()
|
||||
}
|
||||
|
||||
|
||||
class SingleItemDecoder(object):
|
||||
|
||||
TAG_MAP = TAG_MAP
|
||||
TYPE_MAP = TYPE_MAP
|
||||
|
||||
def __init__(self, **options):
|
||||
self._tagMap = options.get('tagMap', self.TAG_MAP)
|
||||
self._typeMap = options.get('typeMap', self.TYPE_MAP)
|
||||
|
||||
def __call__(self, pyObject, asn1Spec, **options):
|
||||
|
||||
if LOG:
|
||||
debug.scope.push(type(pyObject).__name__)
|
||||
LOG('decoder called at scope %s, working with '
|
||||
'type %s' % (debug.scope, type(pyObject).__name__))
|
||||
|
||||
if asn1Spec is None or not isinstance(asn1Spec, base.Asn1Item):
|
||||
raise error.PyAsn1Error(
|
||||
'asn1Spec is not valid (should be an instance of an ASN.1 '
|
||||
'Item, not %s)' % asn1Spec.__class__.__name__)
|
||||
|
||||
try:
|
||||
valueDecoder = self._typeMap[asn1Spec.typeId]
|
||||
|
||||
except KeyError:
|
||||
# use base type for codec lookup to recover untagged types
|
||||
baseTagSet = tag.TagSet(asn1Spec.tagSet.baseTag, asn1Spec.tagSet.baseTag)
|
||||
|
||||
try:
|
||||
valueDecoder = self._tagMap[baseTagSet]
|
||||
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('Unknown ASN.1 tag %s' % asn1Spec.tagSet)
|
||||
|
||||
if LOG:
|
||||
LOG('calling decoder %s on Python type %s '
|
||||
'<%s>' % (type(valueDecoder).__name__,
|
||||
type(pyObject).__name__, repr(pyObject)))
|
||||
|
||||
value = valueDecoder(pyObject, asn1Spec, self, **options)
|
||||
|
||||
if LOG:
|
||||
LOG('decoder %s produced ASN.1 type %s '
|
||||
'<%s>' % (type(valueDecoder).__name__,
|
||||
type(value).__name__, repr(value)))
|
||||
debug.scope.pop()
|
||||
|
||||
return value
|
||||
|
||||
|
||||
class Decoder(object):
    """Thin callable facade that delegates all work to a SingleItemDecoder."""

    SINGLE_ITEM_DECODER = SingleItemDecoder

    def __init__(self, **options):
        # Build the worker once; it owns the dispatch tables.
        self._singleItemDecoder = self.SINGLE_ITEM_DECODER(**options)

    def __call__(self, pyObject, asn1Spec=None, **kwargs):
        worker = self._singleItemDecoder
        return worker(pyObject, asn1Spec=asn1Spec, **kwargs)
|
||||
|
||||
|
||||
#: Turns Python objects of built-in types into ASN.1 objects.
|
||||
#:
|
||||
#: Takes Python objects of built-in types and turns them into a tree of
|
||||
#: ASN.1 objects (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
|
||||
#: may be a scalar or an arbitrary nested structure.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: pyObject: :py:class:`object`
|
||||
#: A scalar or nested Python objects
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#: A pyasn1 type object to act as a template guiding the decoder. It is required
|
||||
#: for successful interpretation of Python objects mapping into their ASN.1
|
||||
#: representations.
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#: A scalar or constructed pyasn1 object
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error
|
||||
#: On decoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Decode native Python object into ASN.1 objects with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> s, _ = decode([1, 2, 3], asn1Spec=seq)
|
||||
#: >>> str(s)
|
||||
#: SequenceOf:
|
||||
#: 1 2 3
|
||||
#:
|
||||
# Module-level convenience singleton documented by the block above.
decode = Decoder()
|
||||
269
lib/pyasn1/codec/native/encoder.py
Normal file
269
lib/pyasn1/codec/native/encoder.py
Normal file
@@ -0,0 +1,269 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from collections import OrderedDict
|
||||
|
||||
from pyasn1 import debug
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import base
|
||||
from pyasn1.type import char
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import univ
|
||||
from pyasn1.type import useful
|
||||
|
||||
__all__ = ['encode']
|
||||
|
||||
LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_ENCODER)
|
||||
|
||||
|
||||
class AbstractItemEncoder(object):
    """Base class for per-type payload encoders; subclasses implement encode()."""

    def encode(self, value, encodeFun, **options):
        # Subclass responsibility; raised as a pyasn1 error (rather than
        # NotImplementedError) to stay inside the library's exception tree.
        raise error.PyAsn1Error('Not implemented')
|
||||
|
||||
|
||||
class BooleanEncoder(AbstractItemEncoder):
    """Render an ASN.1 BOOLEAN as a Python bool."""

    def encode(self, value, encodeFun, **options):
        # The truth value of the pyasn1 object maps directly onto bool.
        result = bool(value)
        return result
|
||||
|
||||
|
||||
class IntegerEncoder(AbstractItemEncoder):
    """Render an ASN.1 INTEGER (or ENUMERATED) as a Python int."""

    def encode(self, value, encodeFun, **options):
        # Coerce the pyasn1 object to a built-in integer.
        result = int(value)
        return result
|
||||
|
||||
|
||||
class BitStringEncoder(AbstractItemEncoder):
    """Render an ASN.1 BIT STRING via its string representation."""

    def encode(self, value, encodeFun, **options):
        # Delegate to the pyasn1 object's own str() rendering.
        rendered = str(value)
        return rendered
|
||||
|
||||
|
||||
class OctetStringEncoder(AbstractItemEncoder):
    """Render an ASN.1 OCTET STRING as raw bytes."""

    def encode(self, value, encodeFun, **options):
        # Hand back the object's raw octet payload.
        octets = value.asOctets()
        return octets
|
||||
|
||||
|
||||
class TextStringEncoder(AbstractItemEncoder):
    """Render an ASN.1 character-string type as a Python str."""

    def encode(self, value, encodeFun, **options):
        # Delegate to the pyasn1 object's own str() rendering.
        text = str(value)
        return text
|
||||
|
||||
|
||||
class NullEncoder(AbstractItemEncoder):
    """Render ASN.1 NULL as Python None."""

    def encode(self, value, encodeFun, **options):
        # NULL carries no payload at all.
        return None
|
||||
|
||||
|
||||
class ObjectIdentifierEncoder(AbstractItemEncoder):
    """Render an ASN.1 OBJECT IDENTIFIER as its dotted-string form."""

    def encode(self, value, encodeFun, **options):
        # Delegate to the pyasn1 object's own str() rendering.
        dotted = str(value)
        return dotted
|
||||
|
||||
|
||||
class RealEncoder(AbstractItemEncoder):
    """Render an ASN.1 REAL as a Python float."""

    def encode(self, value, encodeFun, **options):
        # Coerce the pyasn1 object to a built-in float.
        number = float(value)
        return number
|
||||
|
||||
|
||||
class SetEncoder(AbstractItemEncoder):
    """Render an ASN.1 SET into a Python mapping keyed by component name."""

    # Subclasses may substitute an order-preserving mapping type.
    protoDict = dict

    def encode(self, value, encodeFun, **options):
        # Surface schema violations before encoding anything.
        inconsistency = value.isInconsistent
        if inconsistency:
            raise inconsistency

        namedTypes = value.componentType
        substrate = self.protoDict()

        for idx, (key, subValue) in enumerate(value.items()):
            # Skip OPTIONAL components that carry no value.
            if namedTypes and namedTypes[idx].isOptional and not value[idx].isValue:
                continue
            substrate[key] = encodeFun(subValue, **options)
        return substrate
|
||||
|
||||
|
||||
class SequenceEncoder(SetEncoder):
    """Like SetEncoder, but preserves component order via OrderedDict."""
    protoDict = OrderedDict
|
||||
|
||||
|
||||
class SequenceOfEncoder(AbstractItemEncoder):
    """Render SEQUENCE OF / SET OF as a Python list."""

    def encode(self, value, encodeFun, **options):
        # Surface schema violations before encoding anything.
        inconsistency = value.isInconsistent
        if inconsistency:
            raise inconsistency

        encoded = []
        for component in value:
            encoded.append(encodeFun(component, **options))
        return encoded
|
||||
|
||||
|
||||
class ChoiceEncoder(SequenceEncoder):
    """CHOICE encodes exactly like a SEQUENCE of its selected component."""
    pass
|
||||
|
||||
|
||||
class AnyEncoder(AbstractItemEncoder):
    """Render ASN.1 ANY as raw octets."""

    def encode(self, value, encodeFun, **options):
        # ANY is already serialised content; hand back the raw bytes.
        octets = value.asOctets()
        return octets
|
||||
|
||||
|
||||
# Tag-based dispatch table: maps base ASN.1 tag sets onto payload encoder
# instances.  Consulted as a fallback when a value's typeId is missing
# from TYPE_MAP (e.g. tagged subtypes reduced to their base tag).
TAG_MAP = {
    univ.Boolean.tagSet: BooleanEncoder(),
    univ.Integer.tagSet: IntegerEncoder(),
    univ.BitString.tagSet: BitStringEncoder(),
    univ.OctetString.tagSet: OctetStringEncoder(),
    univ.Null.tagSet: NullEncoder(),
    univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(),
    univ.Enumerated.tagSet: IntegerEncoder(),
    univ.Real.tagSet: RealEncoder(),
    # Sequence & Set have same tags as SequenceOf & SetOf
    univ.SequenceOf.tagSet: SequenceOfEncoder(),
    univ.SetOf.tagSet: SequenceOfEncoder(),
    univ.Choice.tagSet: ChoiceEncoder(),
    # character string types
    char.UTF8String.tagSet: TextStringEncoder(),
    char.NumericString.tagSet: TextStringEncoder(),
    char.PrintableString.tagSet: TextStringEncoder(),
    char.TeletexString.tagSet: TextStringEncoder(),
    char.VideotexString.tagSet: TextStringEncoder(),
    char.IA5String.tagSet: TextStringEncoder(),
    char.GraphicString.tagSet: TextStringEncoder(),
    char.VisibleString.tagSet: TextStringEncoder(),
    char.GeneralString.tagSet: TextStringEncoder(),
    char.UniversalString.tagSet: TextStringEncoder(),
    char.BMPString.tagSet: TextStringEncoder(),
    # useful types
    useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
    useful.GeneralizedTime.tagSet: OctetStringEncoder(),
    useful.UTCTime.tagSet: OctetStringEncoder()
}
|
||||
|
||||
|
||||
# Put in ambiguous & non-ambiguous types for faster codec lookup
# typeId-based dispatch table: the primary (fast, unambiguous) lookup,
# tried before falling back to the tag-based TAG_MAP above.
TYPE_MAP = {
    univ.Boolean.typeId: BooleanEncoder(),
    univ.Integer.typeId: IntegerEncoder(),
    univ.BitString.typeId: BitStringEncoder(),
    univ.OctetString.typeId: OctetStringEncoder(),
    univ.Null.typeId: NullEncoder(),
    univ.ObjectIdentifier.typeId: ObjectIdentifierEncoder(),
    univ.Enumerated.typeId: IntegerEncoder(),
    univ.Real.typeId: RealEncoder(),
    # Sequence & Set have same tags as SequenceOf & SetOf
    univ.Set.typeId: SetEncoder(),
    univ.SetOf.typeId: SequenceOfEncoder(),
    univ.Sequence.typeId: SequenceEncoder(),
    univ.SequenceOf.typeId: SequenceOfEncoder(),
    univ.Choice.typeId: ChoiceEncoder(),
    univ.Any.typeId: AnyEncoder(),
    # character string types
    char.UTF8String.typeId: OctetStringEncoder(),
    char.NumericString.typeId: OctetStringEncoder(),
    char.PrintableString.typeId: OctetStringEncoder(),
    char.TeletexString.typeId: OctetStringEncoder(),
    char.VideotexString.typeId: OctetStringEncoder(),
    char.IA5String.typeId: OctetStringEncoder(),
    char.GraphicString.typeId: OctetStringEncoder(),
    char.VisibleString.typeId: OctetStringEncoder(),
    char.GeneralString.typeId: OctetStringEncoder(),
    char.UniversalString.typeId: OctetStringEncoder(),
    char.BMPString.typeId: OctetStringEncoder(),
    # useful types
    useful.ObjectDescriptor.typeId: OctetStringEncoder(),
    useful.GeneralizedTime.typeId: OctetStringEncoder(),
    useful.UTCTime.typeId: OctetStringEncoder()
}
|
||||
|
||||
|
||||
class SingleItemEncoder(object):
    """Turn one pyasn1 object into a native Python object.

    Dispatch is by the value's ``typeId`` first, then by its base tag set
    as a fallback for tagged subtypes.
    """

    # Default dispatch tables; instances may override them via options.
    TAG_MAP = TAG_MAP
    TYPE_MAP = TYPE_MAP

    def __init__(self, **options):
        # Allow callers to inject custom dispatch tables.
        self._tagMap = options.get('tagMap', self.TAG_MAP)
        self._typeMap = options.get('typeMap', self.TYPE_MAP)

    def __call__(self, value, **options):
        """Encode *value*; raises ~pyasn1.error.PyAsn1Error when it is not
        an ASN.1 item or no suitable payload encoder is registered."""
        if not isinstance(value, base.Asn1Item):
            raise error.PyAsn1Error(
                'value is not valid (should be an instance of an ASN.1 Item)')

        if LOG:
            debug.scope.push(type(value).__name__)
            LOG('encoder called for type %s '
                '<%s>' % (type(value).__name__, value.prettyPrint()))

        tagSet = value.tagSet

        try:
            concreteEncoder = self._typeMap[value.typeId]

        except KeyError:
            # use base type for codec lookup to recover untagged types
            baseTagSet = tag.TagSet(
                value.tagSet.baseTag, value.tagSet.baseTag)

            try:
                concreteEncoder = self._tagMap[baseTagSet]

            except KeyError:
                raise error.PyAsn1Error('No encoder for %s' % (value,))

        if LOG:
            LOG('using value codec %s chosen by '
                '%s' % (concreteEncoder.__class__.__name__, tagSet))

        # Payload encoders may recurse back through `self` for components.
        pyObject = concreteEncoder.encode(value, self, **options)

        if LOG:
            LOG('encoder %s produced: '
                '%s' % (type(concreteEncoder).__name__, repr(pyObject)))
            debug.scope.pop()

        return pyObject
|
||||
|
||||
|
||||
class Encoder(object):
    """Thin callable facade that delegates all work to a SingleItemEncoder."""

    SINGLE_ITEM_ENCODER = SingleItemEncoder

    def __init__(self, **options):
        # Build the worker once; it owns the dispatch tables.
        self._singleItemEncoder = self.SINGLE_ITEM_ENCODER(**options)

    def __call__(self, pyObject, asn1Spec=None, **options):
        worker = self._singleItemEncoder
        return worker(pyObject, asn1Spec=asn1Spec, **options)
|
||||
|
||||
|
||||
#: Turns ASN.1 object into a Python built-in type object(s).
|
||||
#:
|
||||
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: walks all its components recursively and produces a Python built-in type or a tree
|
||||
#: of those.
|
||||
#:
|
||||
#: One exception is that instead of :py:class:`dict`, the :py:class:`OrderedDict`
|
||||
#: is used to preserve ordering of the components in ASN.1 SEQUENCE.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: asn1Value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: pyasn1 object to encode (or a tree of them)
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`object`
|
||||
#: Python built-in type instance (or a tree of them)
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error
|
||||
#: On encoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Encode ASN.1 value object into native Python types
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> seq.extend([1, 2, 3])
|
||||
#: >>> encode(seq)
|
||||
#: [1, 2, 3]
|
||||
#:
|
||||
# Module-level convenience singleton documented by the block above.
encode = SingleItemEncoder()
|
||||
243
lib/pyasn1/codec/streaming.py
Normal file
243
lib/pyasn1/codec/streaming.py
Normal file
@@ -0,0 +1,243 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import univ
|
||||
|
||||
_PY2 = sys.version_info < (3,)
|
||||
|
||||
|
||||
class CachingStreamWrapper(io.IOBase):
    """Wrapper around non-seekable streams.

    Note that the implementation is tied to the decoder,
    not checking for dangerous arguments for the sake
    of performance.

    The read bytes are kept in an internal cache until
    setting _markedPosition which may reset the cache.
    """
    def __init__(self, raw):
        self._raw = raw             # underlying non-seekable stream
        self._cache = io.BytesIO()  # seekable replay buffer for consumed bytes
        self._markedPosition = 0    # earliest position we may seek back to

    def peek(self, n):
        # Read ahead, then rewind the cache so the data can be re-read.
        result = self.read(n)
        self._cache.seek(-len(result), os.SEEK_CUR)
        return result

    def seekable(self):
        return True

    def seek(self, n=-1, whence=os.SEEK_SET):
        # Note that this not safe for seeking forward.
        return self._cache.seek(n, whence)

    def read(self, n=-1):
        # Serve from the cache first, then top up from the raw stream.
        read_from_cache = self._cache.read(n)
        if n != -1:
            n -= len(read_from_cache)
            if not n:  # 0 bytes left to read
                return read_from_cache

        read_from_raw = self._raw.read(n)

        # Keep everything pulled from the raw stream replayable.
        self._cache.write(read_from_raw)

        return read_from_cache + read_from_raw

    @property
    def markedPosition(self):
        """Position where the currently processed element starts.

        This is used for back-tracking in SingleItemDecoder.__call__
        and (indefLen)ValueDecoder and should not be used for other purposes.
        The client is not supposed to ever seek before this position.
        """
        return self._markedPosition

    @markedPosition.setter
    def markedPosition(self, value):
        # By setting the value, we ensure we won't seek back before it.
        # `value` should be the same as the current position
        # We don't check for this for performance reasons.
        self._markedPosition = value

        # Whenever we set _marked_position, we know for sure
        # that we will not return back, and thus it is
        # safe to drop all cached data.
        if self._cache.tell() > io.DEFAULT_BUFFER_SIZE:
            self._cache = io.BytesIO(self._cache.read())
            self._markedPosition = 0

    def tell(self):
        # Position is tracked by the cache, not the raw stream.
        return self._cache.tell()
|
||||
|
||||
|
||||
def asSeekableStream(substrate):
    """Convert object to seekable byte-stream.

    Parameters
    ----------
    substrate: :py:class:`bytes` or :py:class:`io.IOBase` or :py:class:`univ.OctetString`

    Returns
    -------
    : :py:class:`io.IOBase`

    Raises
    ------
    : :py:class:`~pyasn1.error.PyAsn1Error`
        If the supplied substrate cannot be converted to a seekable stream.
    """
    if isinstance(substrate, io.BytesIO):
        return substrate

    elif isinstance(substrate, bytes):
        return io.BytesIO(substrate)

    elif isinstance(substrate, univ.OctetString):
        # Materialise the pyasn1 object's payload into a fresh buffer.
        return io.BytesIO(substrate.asOctets())

    try:
        # Special case: impossible to set attributes on `file` built-in
        # (the `file` type only exists on Python 2).
        if _PY2 and isinstance(substrate, file):
            return io.BufferedReader(substrate)

        elif substrate.seekable():  # Will fail for most invalid types
            return substrate

        else:
            # Stream-like but non-seekable: add a replay cache on top.
            return CachingStreamWrapper(substrate)

    except AttributeError:
        raise error.UnsupportedSubstrateError(
            "Cannot convert " + substrate.__class__.__name__ +
            " to a seekable bit stream.")
|
||||
|
||||
|
||||
def isEndOfStream(substrate):
    """Check whether we have reached the end of a stream.

    Although it is more effective to read and catch exceptions, this probe
    is a generator: a bare ``yield`` (None) signals a non-blocking stream
    with no data currently available, so the caller can retry.

    Parameters
    ----------
    substrate: :py:class:`IOBase`
        Stream to check

    Returns
    -------
    : :py:class:`bool`
    """
    if isinstance(substrate, io.BytesIO):
        # Cheap path: compare current position against buffer length,
        # restoring the original position afterwards.
        cp = substrate.tell()
        substrate.seek(0, os.SEEK_END)
        result = substrate.tell() == cp
        substrate.seek(cp, os.SEEK_SET)
        yield result

    else:
        received = substrate.read(1)
        if received is None:
            # Non-blocking stream with nothing available right now.
            yield

        if received:
            # Put the probe byte back.
            substrate.seek(-1, os.SEEK_CUR)

        yield not received
|
||||
|
||||
|
||||
def peekIntoStream(substrate, size=-1):
    """Peek into stream.

    Implemented as a generator: bare ``yield``s signal that the requested
    amount of data is not yet available (non-blocking streams).

    Parameters
    ----------
    substrate: :py:class:`IOBase`
        Stream to read from.

    size: :py:class:`int`
        How many bytes to peek (-1 = all available)

    Returns
    -------
    : :py:class:`bytes` or :py:class:`str`
        The return type depends on Python major version
    """
    if hasattr(substrate, "peek"):
        received = substrate.peek(size)
        if received is None:
            yield

        # NOTE(review): `received` is not refreshed inside this loop; the
        # repeated bare yields rely on the caller re-driving the generator
        # once more data has arrived — confirm against upstream intent.
        while len(received) < size:
            yield

        yield received

    else:
        # No native peek: read, then restore the original position.
        current_position = substrate.tell()
        try:
            for chunk in readFromStream(substrate, size):
                yield chunk

        finally:
            substrate.seek(current_position)
|
||||
|
||||
|
||||
def readFromStream(substrate, size=-1, context=None):
    """Read from the stream.

    Parameters
    ----------
    substrate: :py:class:`IOBase`
        Stream to read from.

    Keyword parameters
    ------------------
    size: :py:class:`int`
        How many bytes to read (-1 = all available)

    context: :py:class:`dict`
        Opaque caller context will be attached to exception objects created
        by this function.

    Yields
    ------
    : :py:class:`bytes` or :py:class:`str` or :py:class:`SubstrateUnderrunError`
        Read data or :py:class:`~pyasn1.error.SubstrateUnderrunError`
        object if no `size` bytes is readily available in the stream. The
        data type depends on Python major version

    Raises
    ------
    : :py:class:`~pyasn1.error.EndOfStreamError`
        Input stream is exhausted
    """
    while True:
        # this will block unless stream is non-blocking
        received = substrate.read(size)
        if received is None:  # non-blocking stream can do this
            yield error.SubstrateUnderrunError(context=context)

        elif not received and size != 0:  # end-of-stream
            raise error.EndOfStreamError(context=context)

        elif len(received) < size:
            # Short read: push the partial data back and report underrun.
            substrate.seek(-len(received), os.SEEK_CUR)

            # behave like a non-blocking stream
            yield error.SubstrateUnderrunError(context=context)

        else:
            break

    yield received
|
||||
103
lib/pyasn1/compat/integer.py
Normal file
103
lib/pyasn1/compat/integer.py
Normal file
@@ -0,0 +1,103 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import sys
|
||||
import platform
|
||||
|
||||
from pyasn1.compat.octets import oct2int, null, ensureString
|
||||
|
||||
|
||||
implementation = platform.python_implementation()
|
||||
|
||||
# Pure-Python big-int <-> octets conversion for Python 2 and non-CPython
# interpreters; CPython 3 gets the fast int.to_bytes/from_bytes path below.
if sys.version_info[0] < 3 or implementation != 'CPython':
    from binascii import a2b_hex, b2a_hex

    def from_bytes(octets, signed=False):
        # Interpret big-endian octets as an integer (two's complement
        # when *signed* and the top bit is set).
        if not octets:
            return 0

        value = long(b2a_hex(ensureString(octets)), 16)  # Python 2 `long`

        if signed and oct2int(octets[0]) & 0x80:
            # Top bit set: undo two's complement.
            return value - (1 << len(octets) * 8)

        return value

    def to_bytes(value, signed=False, length=0):
        # Serialise an integer into big-endian octets, using two's
        # complement for negatives when *signed*; pad to *length* bits.
        if value < 0:
            if signed:
                bits = bitLength(value)

                # two's complement form
                maxValue = 1 << bits
                valueToEncode = (value + maxValue) % maxValue

            else:
                raise OverflowError('can\'t convert negative int to unsigned')
        elif value == 0 and length == 0:
            return null
        else:
            bits = 0
            valueToEncode = value

        hexValue = hex(valueToEncode)[2:]
        if hexValue.endswith('L'):
            # Python 2 longs repr with a trailing 'L'.
            hexValue = hexValue[:-1]

        if len(hexValue) & 1:
            hexValue = '0' + hexValue

        # padding may be needed for two's complement encoding
        if value != valueToEncode or length:
            hexLength = len(hexValue) * 4

            padLength = max(length, bits)

            if padLength > hexLength:
                hexValue = '00' * ((padLength - hexLength - 1) // 8 + 1) + hexValue
            elif length and hexLength - length > 7:
                raise OverflowError('int too big to convert')

        firstOctet = int(hexValue[:2], 16)

        if signed:
            # Keep the leading bit consistent with the requested sign.
            if firstOctet & 0x80:
                if value >= 0:
                    hexValue = '00' + hexValue
            elif value < 0:
                hexValue = 'ff' + hexValue

        octets_value = a2b_hex(hexValue)

        return octets_value

    def bitLength(number):
        # bits in unsigned number
        hexValue = hex(abs(number))
        bits = len(hexValue) - 2
        if hexValue.endswith('L'):
            bits -= 1
        if bits & 1:
            bits += 1
        bits *= 4
        # TODO: strip lhs zeros
        return bits

else:

    def from_bytes(octets, signed=False):
        # Native CPython 3 fast path.
        return int.from_bytes(bytes(octets), 'big', signed=signed)

    def to_bytes(value, signed=False, length=0):
        length = max(value.bit_length(), length)

        # Reserve room for the sign bit when the value fills whole bytes.
        if signed and length % 8 == 0:
            length += 1

        return value.to_bytes(length // 8 + (length % 8 and 1 or 0), 'big', signed=signed)

    def bitLength(number):
        return int(number).bit_length()
|
||||
46
lib/pyasn1/compat/octets.py
Normal file
46
lib/pyasn1/compat/octets.py
Normal file
@@ -0,0 +1,46 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from sys import version_info
|
||||
|
||||
# Byte/str conversion shims papering over the Python 2 / 3 text split.
if version_info[0] <= 2:
    # Python 2: `str` is the byte type; conversions are mostly identity.
    int2oct = chr
    # noinspection PyPep8
    ints2octs = lambda s: ''.join([int2oct(x) for x in s])
    null = ''
    oct2int = ord
    # TODO: refactor to return a sequence of ints
    # noinspection PyPep8
    octs2ints = lambda s: [oct2int(x) for x in s]
    # noinspection PyPep8
    str2octs = lambda x: x
    # noinspection PyPep8
    octs2str = lambda x: x
    # noinspection PyPep8
    isOctetsType = lambda s: isinstance(s, str)
    # noinspection PyPep8
    isStringType = lambda s: isinstance(s, (str, unicode))
    # noinspection PyPep8
    ensureString = str
else:
    # Python 3: `bytes` is the octet type; iso-8859-1 round-trips all octets.
    ints2octs = bytes
    # noinspection PyPep8
    int2oct = lambda x: ints2octs((x,))
    null = ints2octs()
    # noinspection PyPep8
    oct2int = lambda x: x
    # noinspection PyPep8
    octs2ints = lambda x: x
    # noinspection PyPep8
    str2octs = lambda x: x.encode('iso-8859-1')
    # noinspection PyPep8
    octs2str = lambda x: x.decode('iso-8859-1')
    # noinspection PyPep8
    isOctetsType = lambda s: isinstance(s, bytes)
    # noinspection PyPep8
    isStringType = lambda s: isinstance(s, str)
    # noinspection PyPep8
    ensureString = bytes
|
||||
147
lib/pyasn1/debug.py
Normal file
147
lib/pyasn1/debug.py
Normal file
@@ -0,0 +1,147 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from pyasn1 import __version__
|
||||
from pyasn1 import error
|
||||
from pyasn1.compat.octets import octs2ints
|
||||
|
||||
__all__ = ['Debug', 'setLogger', 'hexdump']
|
||||
|
||||
DEBUG_NONE = 0x0000
|
||||
DEBUG_ENCODER = 0x0001
|
||||
DEBUG_DECODER = 0x0002
|
||||
DEBUG_ALL = 0xffff
|
||||
|
||||
FLAG_MAP = {
|
||||
'none': DEBUG_NONE,
|
||||
'encoder': DEBUG_ENCODER,
|
||||
'decoder': DEBUG_DECODER,
|
||||
'all': DEBUG_ALL
|
||||
}
|
||||
|
||||
LOGGEE_MAP = {}
|
||||
|
||||
|
||||
class Printer(object):
    """Adapter that routes pyasn1 debug output into stdlib `logging`."""

    # noinspection PyShadowingNames
    def __init__(self, logger=None, handler=None, formatter=None):
        # Fall back to library-wide defaults for anything not supplied.
        target = logger if logger is not None else logging.getLogger('pyasn1')

        target.setLevel(logging.DEBUG)

        sink = handler if handler is not None else logging.StreamHandler()

        layout = (formatter if formatter is not None
                  else logging.Formatter('%(asctime)s %(name)s: %(message)s'))

        sink.setFormatter(layout)
        sink.setLevel(logging.DEBUG)
        target.addHandler(sink)

        self.__logger = target

    def __call__(self, msg):
        # Emit at DEBUG level; filtering is left to the logging config.
        self.__logger.debug(msg)

    def __str__(self):
        return '<python logging>'
|
||||
|
||||
|
||||
class Debug(object):
    """Debug-category holder that parses textual flags and sinks messages.

    Flag names come from FLAG_MAP; a '!' or '~' prefix subtracts the
    category instead of adding it.
    """

    defaultPrinter = Printer()

    def __init__(self, *flags, **options):
        self._flags = DEBUG_NONE

        if 'loggerName' in options:
            # route our logs to parent logger
            self._printer = Printer(
                logger=logging.getLogger(options['loggerName']),
                handler=logging.NullHandler()
            )

        elif 'printer' in options:
            self._printer = options.get('printer')

        else:
            self._printer = self.defaultPrinter

        self._printer('running pyasn1 %s, debug flags %s' % (__version__, ', '.join(flags)))

        for flag in flags:
            inverse = flag and flag[0] in ('!', '~')
            if inverse:
                flag = flag[1:]
            try:
                if inverse:
                    self._flags &= ~FLAG_MAP[flag]
                else:
                    self._flags |= FLAG_MAP[flag]
            except KeyError:
                raise error.PyAsn1Error('bad debug flag %s' % flag)

            self._printer("debug category '%s' %s" % (flag, inverse and 'disabled' or 'enabled'))

    def __str__(self):
        return 'logger %s, flags %x' % (self._printer, self._flags)

    def __call__(self, msg):
        self._printer(msg)

    # Support `LOG & DEBUG_XXX` bit tests from either operand side.
    def __and__(self, flag):
        return self._flags & flag

    def __rand__(self, flag):
        return flag & self._flags
|
||||
|
||||
# Module-wide logger handle; DEBUG_NONE (falsy) means tracing is off.
_LOG = DEBUG_NONE


def setLogger(userLogger):
    """Install (or clear, when falsy) the active debug logger globally
    and refresh every module registered via registerLoggee()."""
    global _LOG

    if userLogger:
        _LOG = userLogger
    else:
        _LOG = DEBUG_NONE

    # Update registered logging clients
    for module, (name, flags) in LOGGEE_MAP.items():
        setattr(module, name, _LOG & flags and _LOG or DEBUG_NONE)
|
||||
|
||||
|
||||
def registerLoggee(module, name='LOG', flags=DEBUG_NONE):
    """Register *module* (by name) so its `LOG` attribute tracks the
    global logger for the given flag mask; returns the current logger."""
    LOGGEE_MAP[sys.modules[module]] = name, flags
    setLogger(_LOG)
    return _LOG
|
||||
|
||||
|
||||
def hexdump(octets):
    """Format *octets* as a space-separated hex string.

    A newline plus a zero-padded 5-digit offset label is inserted before
    every 16th octet, matching the original layout exactly.
    """
    # Idiomatic enumerate() replaces zip(range(len(...)), ...).
    words = []
    for offset, octet in enumerate(octs2ints(octets)):
        label = '\n%.5d: ' % offset if offset % 16 == 0 else ''
        words.append('%s%.2X' % (label, octet))
    return ' '.join(words)
|
||||
|
||||
|
||||
class Scope(object):
    """Dotted-path stack used to label nested codec debug contexts."""

    def __init__(self):
        self._list = []

    def __str__(self):
        # Render e.g. ['a', 'b'] as 'a.b'.
        return '.'.join(self._list)

    def push(self, token):
        self._list.append(token)

    def pop(self):
        return self._list.pop()
|
||||
|
||||
|
||||
# Shared scope tracker used by the codec debug tracing.
scope = Scope()
|
||||
116
lib/pyasn1/error.py
Normal file
116
lib/pyasn1/error.py
Normal file
@@ -0,0 +1,116 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
|
||||
|
||||
class PyAsn1Error(Exception):
    """Root of the pyasn1 exception hierarchy.

    All ASN.1-related errors raised by this library derive from this
    class (itself based on :class:`Exception`).  Positional and keyword
    arguments are stored opaquely; a ``context`` keyword, if supplied,
    is exposed through the :attr:`context` property.
    """

    def __init__(self, *args, **kwargs):
        # NOTE: Exception.__init__ is deliberately not invoked here, so
        # subclasses that also derive from UnicodeError keep the args set
        # up by that other base class.
        self._args = args
        self._kwargs = kwargs

    @property
    def context(self):
        """Return exception context.

        When the exception object is created, the caller can supply an
        opaque ``context`` dict for upper layers to better understand
        the cause of the exception.

        Returns
        -------
        : :py:class:`dict`
            Dict holding context specific data
        """
        return self._kwargs.get('context', {})
|
||||
|
||||
|
||||
class ValueConstraintError(PyAsn1Error):
    """ASN.1 type constraints violation exception

    The `ValueConstraintError` exception indicates an ASN.1 value
    constraint violation.

    It might happen on value object instantiation (for scalar types) or on
    serialization (for constructed types).
    """


class SubstrateUnderrunError(PyAsn1Error):
    """ASN.1 data structure deserialization error

    The `SubstrateUnderrunError` exception indicates insufficient serialised
    data on input of a de-serialization codec.
    """


class EndOfStreamError(SubstrateUnderrunError):
    """ASN.1 data structure deserialization error

    The `EndOfStreamError` exception indicates the condition of the input
    stream has been closed.
    """


class UnsupportedSubstrateError(PyAsn1Error):
    """Unsupported substrate type to parse as ASN.1 data."""


class PyAsn1UnicodeError(PyAsn1Error, UnicodeError):
    """Unicode text processing error

    The `PyAsn1UnicodeError` exception is a base class for errors relating to
    unicode text de/serialization.

    Apart from inheriting from :class:`PyAsn1Error`, it also inherits from
    :class:`UnicodeError` to help the caller catching unicode-related errors.
    """
    def __init__(self, message, unicode_error=None):
        # Preserve the low-level UnicodeError details (encoding, offsets)
        # while reporting the high-level message via PyAsn1Error.
        if isinstance(unicode_error, UnicodeError):
            UnicodeError.__init__(self, *unicode_error.args)
        PyAsn1Error.__init__(self, message)


class PyAsn1UnicodeDecodeError(PyAsn1UnicodeError, UnicodeDecodeError):
    """Unicode text decoding error

    The `PyAsn1UnicodeDecodeError` exception represents a failure to
    deserialize unicode text.

    Apart from inheriting from :class:`PyAsn1UnicodeError`, it also inherits
    from :class:`UnicodeDecodeError` to help the caller catching unicode-related
    errors.
    """


class PyAsn1UnicodeEncodeError(PyAsn1UnicodeError, UnicodeEncodeError):
    """Unicode text encoding error

    The `PyAsn1UnicodeEncodeError` exception represents a failure to
    serialize unicode text.

    Apart from inheriting from :class:`PyAsn1UnicodeError`, it also inherits
    from :class:`UnicodeEncodeError` to help the caller catching
    unicode-related errors.
    """
|
||||
|
||||
|
||||
1
lib/pyasn1/type/__init__.py
Normal file
1
lib/pyasn1/type/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# This file is necessary to make this directory a package.
|
||||
706
lib/pyasn1/type/base.py
Normal file
706
lib/pyasn1/type/base.py
Normal file
@@ -0,0 +1,706 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import sys
|
||||
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import constraint
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import tagmap
|
||||
|
||||
__all__ = ['Asn1Item', 'Asn1Type', 'SimpleAsn1Type',
|
||||
'ConstructedAsn1Type']
|
||||
|
||||
|
||||
class Asn1Item(object):
    """Common ancestor of all ASN.1 items; hands out unique type IDs."""

    @classmethod
    def getTypeId(cls, increment=1):
        # Lazily initialise the shared counter on first use, then bump it.
        counter = getattr(Asn1Item, '_typeCounter', 0) + increment
        Asn1Item._typeCounter = counter
        return counter
|
||||
|
||||
|
||||
class Asn1Type(Asn1Item):
    """Base class for all classes representing ASN.1 types.

    In the user code, |ASN.1| class is normally used only for telling
    ASN.1 objects from others.

    Note
    ----
    For as long as ASN.1 is concerned, a way to compare ASN.1 types
    is to use :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
    """
    #: Set or return a :py:class:`~pyasn1.type.tag.TagSet` object representing
    #: ASN.1 tag(s) associated with |ASN.1| type.
    tagSet = tag.TagSet()

    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    #: object imposing constraints on initialization values.
    subtypeSpec = constraint.ConstraintsIntersection()

    # Disambiguation ASN.1 types identification
    typeId = None

    def __init__(self, **kwargs):
        # Collect defaults plus caller overrides into one mapping; these
        # attributes become read-only on the instance (see __setattr__).
        readOnly = {
            'tagSet': self.tagSet,
            'subtypeSpec': self.subtypeSpec
        }

        readOnly.update(kwargs)

        # Write through __dict__ directly to bypass the read-only guard
        # during initialization.
        self.__dict__.update(readOnly)

        self._readOnly = readOnly

    def __setattr__(self, name, value):
        # Public attributes listed in _readOnly are frozen after __init__;
        # names starting with '_' remain writable for internal bookkeeping.
        if name[0] != '_' and name in self._readOnly:
            raise error.PyAsn1Error('read-only instance attribute "%s"' % name)

        self.__dict__[name] = value

    def __str__(self):
        return self.prettyPrint()

    @property
    def readOnly(self):
        # Mapping of the frozen instance attributes and their values.
        return self._readOnly

    @property
    def effectiveTagSet(self):
        """For |ASN.1| type is equivalent to *tagSet*
        """
        return self.tagSet  # used by untagged types

    @property
    def tagMap(self):
        """Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping ASN.1 tags to ASN.1 objects within callee object.
        """
        return tagmap.TagMap({self.tagSet: self})

    def isSameTypeWith(self, other, matchTags=True, matchConstraints=True):
        """Examine |ASN.1| type for equality with other ASN.1 type.

        ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints
        (:py:mod:`~pyasn1.type.constraint`) are examined when carrying
        out ASN.1 types comparison.  Python class inheritance
        relationship is NOT considered.

        Parameters
        ----------
        other: a pyasn1 type object
            Class instance representing ASN.1 type.

        Returns
        -------
        : :class:`bool`
            :obj:`True` if *other* is |ASN.1| type, :obj:`False` otherwise.
        """
        # Identity short-circuits; otherwise both tag and constraint
        # comparisons must pass (each one skippable via its flag).
        return (self is other or
                (not matchTags or self.tagSet == other.tagSet) and
                (not matchConstraints or self.subtypeSpec == other.subtypeSpec))

    def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True):
        """Examine |ASN.1| type for subtype relationship with other ASN.1 type.

        ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints
        (:py:mod:`~pyasn1.type.constraint`) are examined when carrying
        out ASN.1 types comparison.  Python class inheritance
        relationship is NOT considered.

        Parameters
        ----------
        other: a pyasn1 type object
            Class instance representing ASN.1 type.

        Returns
        -------
        : :class:`bool`
            :obj:`True` if *other* is a subtype of |ASN.1| type,
            :obj:`False` otherwise.
        """
        # NOTE(review): due to Python operator precedence ('and' binds
        # tighter than 'or') matchTags=False short-circuits the whole
        # expression to True; this parenthesization matches upstream pyasn1.
        return (not matchTags or
                (self.tagSet.isSuperTagSetOf(other.tagSet)) and
                (not matchConstraints or self.subtypeSpec.isSuperTypeOf(other.subtypeSpec)))

    @staticmethod
    def isNoValue(*values):
        # True only when every argument is the noValue sentinel.
        for value in values:
            if value is not noValue:
                return False
        return True

    def prettyPrint(self, scope=0):
        # Concrete types must supply their own human-readable rendering.
        raise NotImplementedError()

    # backward compatibility

    def getTagSet(self):
        return self.tagSet

    def getEffectiveTagSet(self):
        return self.effectiveTagSet

    def getTagMap(self):
        return self.tagMap

    def getSubtypeSpec(self):
        return self.subtypeSpec

    # backward compatibility
    def hasValue(self):
        return self.isValue

# Backward compatibility
Asn1ItemBase = Asn1Type
|
||||
|
||||
|
||||
class NoValue(object):
    """Create a singleton instance of NoValue class.

    The *NoValue* sentinel object represents an instance of ASN.1 schema
    object as opposed to ASN.1 value object.

    Only ASN.1 schema-related operations can be performed on ASN.1
    schema objects.

    Warning
    -------
    Any operation attempted on the *noValue* object will raise the
    *PyAsn1Error* exception.
    """
    # Dunders that must keep their normal behavior so the sentinel stays
    # introspectable, printable and picklable; everything else gets replaced
    # with a raising stub in __new__.
    skipMethods = {
        '__slots__',
        # attributes
        '__getattribute__',
        '__getattr__',
        '__setattr__',
        '__delattr__',
        # class instance
        '__class__',
        '__init__',
        '__del__',
        '__new__',
        '__repr__',
        '__qualname__',
        '__objclass__',
        'im_class',
        '__sizeof__',
        # pickle protocol
        '__reduce__',
        '__reduce_ex__',
        '__getnewargs__',
        '__getinitargs__',
        '__getstate__',
        '__setstate__',
    }

    # Lazily-created singleton instance (see __new__).
    _instance = None

    def __new__(cls):
        if cls._instance is None:
            def getPlug(name):
                # Factory binding *name* into the raising stub's message.
                def plug(self, *args, **kw):
                    raise error.PyAsn1Error('Attempted "%s" operation on ASN.1 schema object' % name)
                return plug

            # Gather every dunder implemented by common built-in types and
            # install a raising stub for each on this class, so arithmetic,
            # indexing, iteration etc. on the sentinel fail loudly.
            op_names = [name
                        for typ in (str, int, list, dict)
                        for name in dir(typ)
                        if (name not in cls.skipMethods and
                            name.startswith('__') and
                            name.endswith('__') and
                            callable(getattr(typ, name)))]

            for name in set(op_names):
                setattr(cls, name, getPlug(name))

            cls._instance = object.__new__(cls)

        return cls._instance

    def __getattr__(self, attr):
        # Attributes in skipMethods report honestly as missing; any other
        # access is treated as a forbidden operation on a schema object.
        if attr in self.skipMethods:
            raise AttributeError('Attribute %s not present' % attr)

        raise error.PyAsn1Error('Attempted "%s" operation on ASN.1 schema object' % attr)

    def __repr__(self):
        return '<%s object>' % self.__class__.__name__


# Module-wide unique sentinel; identity-compared throughout the library.
noValue = NoValue()
|
||||
|
||||
|
||||
class SimpleAsn1Type(Asn1Type):
    """Base class for all simple classes representing ASN.1 types.

    ASN.1 distinguishes types by their ability to hold other objects.
    Scalar types are known as *simple* in ASN.1.

    In the user code, |ASN.1| class is normally used only for telling
    ASN.1 objects from others.

    Note
    ----
    For as long as ASN.1 is concerned, a way to compare ASN.1 types
    is to use :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
    """
    #: Default payload value
    defaultValue = noValue

    def __init__(self, value=noValue, **kwargs):
        Asn1Type.__init__(self, **kwargs)
        if value is noValue:
            # No payload supplied: fall back to the class default
            # (which may itself be noValue, producing a schema object).
            value = self.defaultValue
        else:
            # Normalize the payload, then verify it against constraints.
            value = self.prettyIn(value)
            try:
                self.subtypeSpec(value)

            except error.PyAsn1Error:
                # Re-raise the same exception type with the offending
                # class name appended for easier diagnostics.
                exType, exValue, exTb = sys.exc_info()
                raise exType('%s at %s' % (exValue, self.__class__.__name__))

        self._value = value

    def __repr__(self):
        representation = '%s %s object' % (
            self.__class__.__name__, self.isValue and 'value' or 'schema')

        for attr, value in self.readOnly.items():
            if value:
                representation += ', %s %s' % (attr, value)

        if self.isValue:
            value = self.prettyPrint()
            # Truncate long payloads to keep reprs readable.
            if len(value) > 32:
                value = value[:16] + '...' + value[-16:]
            representation += ', payload [%s]' % value

        return '<%s>' % representation

    def __eq__(self, other):
        return self is other and True or self._value == other

    def __ne__(self, other):
        return self._value != other

    def __lt__(self, other):
        return self._value < other

    def __le__(self, other):
        return self._value <= other

    def __gt__(self, other):
        return self._value > other

    def __ge__(self, other):
        return self._value >= other

    # Truthiness follows the underlying payload on both Python lines.
    if sys.version_info[0] <= 2:
        def __nonzero__(self):
            return self._value and True or False
    else:
        def __bool__(self):
            return self._value and True or False

    def __hash__(self):
        return hash(self._value)

    @property
    def isValue(self):
        """Indicate that |ASN.1| object represents ASN.1 value.

        If *isValue* is :obj:`False` then this object represents just ASN.1
        schema.  If :obj:`True` then, in addition to its ASN.1 schema
        features, this object can also be used like a Python built-in object
        (e.g. :class:`int`, :class:`str`, :class:`dict` etc.).

        Returns
        -------
        : :class:`bool`
            :obj:`False` if object represents just ASN.1 schema.
            :obj:`True` if object represents ASN.1 schema and can be used
            as a normal value.

        Note
        ----
        PyASN1 schema objects can only participate in schema-related
        operations (e.g. guiding serialisation codecs); value objects can
        additionally take part in regular Python operations.
        """
        return self._value is not noValue

    def clone(self, value=noValue, **kwargs):
        """Create a modified version of |ASN.1| schema or value object.

        Accepts the same arguments as the |ASN.1| class constructor, all
        optional.  Supplied arguments take precedence over those used to
        instantiate `self`.

        Note
        ----
        Due to the immutable nature of the |ASN.1| object, if no arguments
        are supplied, no new |ASN.1| object will be created and `self` will
        be returned instead.
        """
        if value is noValue:
            if not kwargs:
                return self

            # Keep the existing payload; only the keyword options change.
            value = self._value

        initializers = self.readOnly.copy()
        initializers.update(kwargs)

        return self.__class__(value, **initializers)

    def subtype(self, value=noValue, **kwargs):
        """Create a specialization of |ASN.1| schema or value object.

        Accepts the same arguments as the |ASN.1| class constructor, all
        optional, plus the tagging/constraint options below which derive a
        new ASN.1 subtype from `self`.

        Other Parameters
        ----------------
        implicitTag: :py:class:`~pyasn1.type.tag.Tag`
            Implicitly apply given ASN.1 tag object to `self`'s
            :py:class:`~pyasn1.type.tag.TagSet` for the new object.

        explicitTag: :py:class:`~pyasn1.type.tag.Tag`
            Explicitly apply given ASN.1 tag object to `self`'s
            :py:class:`~pyasn1.type.tag.TagSet` for the new object.

        subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
            Add ASN.1 constraints object to `self`'s, then use the result
            as the new object's ASN.1 constraints.

        Returns
        -------
        :
            new instance of |ASN.1| schema or value object

        Note
        ----
        Due to the immutable nature of the |ASN.1| object, if no arguments
        are supplied, no new |ASN.1| object will be created and `self` will
        be returned instead.
        """
        if value is noValue:
            if not kwargs:
                return self

            value = self._value

        initializers = self.readOnly.copy()

        implicitTag = kwargs.pop('implicitTag', None)
        if implicitTag is not None:
            initializers['tagSet'] = self.tagSet.tagImplicitly(implicitTag)

        explicitTag = kwargs.pop('explicitTag', None)
        if explicitTag is not None:
            initializers['tagSet'] = self.tagSet.tagExplicitly(explicitTag)

        # Remaining options are *added* to the existing ones (e.g. extra
        # constraints), not replaced.
        for arg, option in kwargs.items():
            initializers[arg] += option

        return self.__class__(value, **initializers)

    def prettyIn(self, value):
        # Normalize an initializer into the internal payload representation;
        # identity by default, overridden by concrete types.
        return value

    def prettyOut(self, value):
        # Render the internal payload for display; overridden by subclasses.
        return str(value)

    def prettyPrint(self, scope=0):
        return self.prettyOut(self._value)

    def prettyPrintType(self, scope=0):
        return '%s -> %s' % (self.tagSet, self.__class__.__name__)

# Backward compatibility
AbstractSimpleAsn1Item = SimpleAsn1Type
|
||||
|
||||
#
# Constructed types:
# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice
# * ASN1 types and values are represened by Python class instances
# * Value initialization is made for defaulted components only
# * Primary method of component addressing is by-position. Data model for base
#   type is Python sequence. Additional type-specific addressing methods
#   may be implemented for particular types.
# * SequenceOf and SetOf types do not implement any additional methods
# * Sequence, Set and Choice types also implement by-identifier addressing
# * Sequence, Set and Choice types also implement by-asn1-type (tag) addressing
# * Sequence and Set types may include optional and defaulted
#   components
# * Constructed types hold a reference to component types used for value
#   verification and ordering.
# * Component type is a scalar type for SequenceOf/SetOf types and a list
#   of types for Sequence/Set/Choice.
#


class ConstructedAsn1Type(Asn1Type):
    """Base class for all constructed classes representing ASN.1 types.

    ASN.1 distinguishes types by their ability to hold other objects.
    Those "nesting" types are known as *constructed* in ASN.1.

    In the user code, |ASN.1| class is normally used only for telling
    ASN.1 objects from others.

    Note
    ----
    For as long as ASN.1 is concerned, a way to compare ASN.1 types
    is to use :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
    """

    #: If :obj:`True`, requires exact component type matching,
    #: otherwise subtype relation is only enforced
    strictConstraints = False

    # Per-subclass specification of the contained component type(s).
    componentType = None

    # backward compatibility, unused
    sizeSpec = constraint.ConstraintsIntersection()

    def __init__(self, **kwargs):
        readOnly = {
            'componentType': self.componentType,
            # backward compatibility, unused
            'sizeSpec': self.sizeSpec
        }

        # backward compatibility: preserve legacy sizeSpec support
        kwargs = self._moveSizeSpec(**kwargs)

        readOnly.update(kwargs)

        Asn1Type.__init__(self, **readOnly)

    def _moveSizeSpec(self, **kwargs):
        # backward compatibility, unused
        # Fold a legacy 'sizeSpec' option into 'subtypeSpec'.
        # NOTE(review): the branches look inverted (a truthy subtypeSpec is
        # *replaced* by sizeSpec rather than extended) — this matches
        # upstream pyasn1 behavior and is kept as-is; confirm before
        # changing.
        sizeSpec = kwargs.pop('sizeSpec', self.sizeSpec)
        if sizeSpec:
            subtypeSpec = kwargs.pop('subtypeSpec', self.subtypeSpec)
            if subtypeSpec:
                subtypeSpec = sizeSpec

            else:
                subtypeSpec += sizeSpec

            kwargs['subtypeSpec'] = subtypeSpec

        return kwargs

    def __repr__(self):
        representation = '%s %s object' % (
            self.__class__.__name__, self.isValue and 'value' or 'schema'
        )

        for attr, value in self.readOnly.items():
            if value is not noValue:
                representation += ', %s=%r' % (attr, value)

        if self.isValue and self.components:
            representation += ', payload [%s]' % ', '.join(
                [repr(x) for x in self.components])

        return '<%s>' % representation

    def __eq__(self, other):
        return self is other or self.components == other

    def __ne__(self, other):
        return self.components != other

    def __lt__(self, other):
        return self.components < other

    def __le__(self, other):
        return self.components <= other

    def __gt__(self, other):
        return self.components > other

    def __ge__(self, other):
        return self.components >= other

    # Truthiness follows the component collection on both Python lines.
    if sys.version_info[0] <= 2:
        def __nonzero__(self):
            return bool(self.components)
    else:
        def __bool__(self):
            return bool(self.components)

    @property
    def components(self):
        # Concrete constructed types must expose their component collection.
        raise error.PyAsn1Error('Method not implemented')

    def _cloneComponentValues(self, myClone, cloneValueFlag):
        # Hook for subclasses: copy component values into *myClone*.
        pass

    def clone(self, **kwargs):
        """Create a modified version of |ASN.1| schema object.

        Accepts the same arguments as the |ASN.1| class constructor, all
        optional.  Supplied arguments take precedence over those used to
        instantiate `self`.  Possible values of `self` are never copied
        over thus `clone()` can only create a new schema object.

        Returns
        -------
        :
            new instance of |ASN.1| type/value

        Note
        ----
        Due to the mutable nature of the |ASN.1| object, even if no
        arguments are supplied, a new |ASN.1| object will be created and
        returned.
        """
        cloneValueFlag = kwargs.pop('cloneValueFlag', False)

        initializers = self.readOnly.copy()
        initializers.update(kwargs)

        clone = self.__class__(**initializers)

        if cloneValueFlag:
            self._cloneComponentValues(clone, cloneValueFlag)

        return clone

    def subtype(self, **kwargs):
        """Create a specialization of |ASN.1| schema object.

        Accepts the same arguments as the |ASN.1| class constructor, all
        optional, plus the tagging/constraint options below which derive a
        new ASN.1 subtype from `self`.

        Other Parameters
        ----------------
        implicitTag: :py:class:`~pyasn1.type.tag.Tag`
            Implicitly apply given ASN.1 tag object to `self`'s
            :py:class:`~pyasn1.type.tag.TagSet` for the new object.

        explicitTag: :py:class:`~pyasn1.type.tag.Tag`
            Explicitly apply given ASN.1 tag object to `self`'s
            :py:class:`~pyasn1.type.tag.TagSet` for the new object.

        subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
            Add ASN.1 constraints object to `self`'s, then use the result
            as the new object's ASN.1 constraints.

        Returns
        -------
        :
            new instance of |ASN.1| type/value

        Note
        ----
        Due to the mutable nature of the |ASN.1| object, even if no
        arguments are supplied, a new |ASN.1| object will be created and
        returned.
        """

        initializers = self.readOnly.copy()

        cloneValueFlag = kwargs.pop('cloneValueFlag', False)

        implicitTag = kwargs.pop('implicitTag', None)
        if implicitTag is not None:
            initializers['tagSet'] = self.tagSet.tagImplicitly(implicitTag)

        explicitTag = kwargs.pop('explicitTag', None)
        if explicitTag is not None:
            initializers['tagSet'] = self.tagSet.tagExplicitly(explicitTag)

        # Remaining options are *added* to the existing ones, not replaced.
        for arg, option in kwargs.items():
            initializers[arg] += option

        clone = self.__class__(**initializers)

        if cloneValueFlag:
            self._cloneComponentValues(clone, cloneValueFlag)

        return clone

    def getComponentByPosition(self, idx):
        raise error.PyAsn1Error('Method not implemented')

    def setComponentByPosition(self, idx, value, verifyConstraints=True):
        raise error.PyAsn1Error('Method not implemented')

    def setComponents(self, *args, **kwargs):
        # Positional args assign by index, keyword args by component name.
        for idx, value in enumerate(args):
            self[idx] = value
        for k in kwargs:
            self[k] = kwargs[k]
        return self

    # backward compatibility

    def setDefaultComponents(self):
        pass

    def getComponentType(self):
        return self.componentType

    # backward compatibility, unused
    def verifySizeSpec(self):
        self.subtypeSpec(self)


# Backward compatibility
AbstractConstructedAsn1Item = ConstructedAsn1Type
|
||||
335
lib/pyasn1/type/char.py
Normal file
335
lib/pyasn1/type/char.py
Normal file
@@ -0,0 +1,335 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import sys
|
||||
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import univ
|
||||
|
||||
__all__ = ['NumericString', 'PrintableString', 'TeletexString', 'T61String', 'VideotexString',
|
||||
'IA5String', 'GraphicString', 'VisibleString', 'ISO646String',
|
||||
'GeneralString', 'UniversalString', 'BMPString', 'UTF8String']
|
||||
|
||||
NoValue = univ.NoValue
|
||||
noValue = univ.noValue
|
||||
|
||||
|
||||
class AbstractCharacterString(univ.OctetString):
    """Creates |ASN.1| schema or value object.

    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`,
    its objects are immutable and duck-type Python 2 :class:`str` or
    Python 3 :class:`bytes`.  When used in octet-stream context, |ASN.1|
    type assumes "|encoding|" encoding.

    Keyword Args
    ------------
    value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
        Text or serialized octet-stream (note `encoding` parameter) or
        |ASN.1| class instance.  If `value` is not given, schema object
        will be created.

    tagSet: :py:class:`~pyasn1.type.tag.TagSet`
        Object representing non-default ASN.1 tag(s)

    subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
        Object representing non-default ASN.1 subtype constraint(s).
        Constraints verification for |ASN.1| type occurs automatically on
        object instantiation.

    encoding: :py:class:`str`
        Unicode codec ID to encode/decode the payload when |ASN.1| object
        is used in octet-stream context.

    Raises
    ------
    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
        On constraint violation or bad initializer.
    """

    # The internal payload is always text; the two branches below adapt
    # the text/bytes boundary to the running Python major version.
    if sys.version_info[0] <= 2:
        def __str__(self):
            try:
                # `str` is Py2 text representation
                return self._value.encode(self.encoding)

            except UnicodeEncodeError:
                exc = sys.exc_info()[1]
                raise error.PyAsn1UnicodeEncodeError(
                    "Can't encode string '%s' with codec "
                    "%s" % (self._value, self.encoding), exc
                )

        def __unicode__(self):
            return unicode(self._value)

        def prettyIn(self, value):
            # Coerce any accepted initializer into a unicode payload.
            try:
                if isinstance(value, unicode):
                    return value
                elif isinstance(value, str):
                    return value.decode(self.encoding)
                elif isinstance(value, (tuple, list)):
                    # Sequence of byte ordinals -> byte string -> decode.
                    return self.prettyIn(''.join([chr(x) for x in value]))
                elif isinstance(value, univ.OctetString):
                    return value.asOctets().decode(self.encoding)
                else:
                    return unicode(value)

            except (UnicodeDecodeError, LookupError):
                exc = sys.exc_info()[1]
                raise error.PyAsn1UnicodeDecodeError(
                    "Can't decode string '%s' with codec "
                    "%s" % (value, self.encoding), exc
                )

        def asOctets(self, padding=True):
            return str(self)

        def asNumbers(self, padding=True):
            return tuple([ord(x) for x in str(self)])

    else:
        def __str__(self):
            # `unicode` is Py3 text representation
            return str(self._value)

        def __bytes__(self):
            try:
                return self._value.encode(self.encoding)
            except UnicodeEncodeError:
                exc = sys.exc_info()[1]
                raise error.PyAsn1UnicodeEncodeError(
                    "Can't encode string '%s' with codec "
                    "%s" % (self._value, self.encoding), exc
                )

        def prettyIn(self, value):
            # Coerce any accepted initializer into a str payload.
            try:
                if isinstance(value, str):
                    return value
                elif isinstance(value, bytes):
                    return value.decode(self.encoding)
                elif isinstance(value, (tuple, list)):
                    # Sequence of byte ordinals -> bytes -> decode.
                    return self.prettyIn(bytes(value))
                elif isinstance(value, univ.OctetString):
                    return value.asOctets().decode(self.encoding)
                else:
                    return str(value)

            except (UnicodeDecodeError, LookupError):
                exc = sys.exc_info()[1]
                raise error.PyAsn1UnicodeDecodeError(
                    "Can't decode string '%s' with codec "
                    "%s" % (value, self.encoding), exc
                )

        def asOctets(self, padding=True):
            return bytes(self)

        def asNumbers(self, padding=True):
            return tuple(bytes(self))

    #
    # See OctetString.prettyPrint() for the explanation
    #

    def prettyOut(self, value):
        return value

    def prettyPrint(self, scope=0):
        # first see if subclass has its own .prettyOut()
        value = self.prettyOut(self._value)

        if value is not self._value:
            return value

        return AbstractCharacterString.__str__(self)

    def __reversed__(self):
        return reversed(self._value)
|
||||
|
||||
|
||||
class NumericString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18)  # universal tag 18
    )
    # Payload is serialized as ASCII.
    encoding = 'us-ascii'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class PrintableString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19)  # universal tag 19
    )
    # Payload is serialized as ASCII.
    encoding = 'us-ascii'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class TeletexString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20)  # universal tag 20
    )
    # Payload is serialized as Latin-1.
    encoding = 'iso-8859-1'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class T61String(TeletexString):
    # Alias type: identical to TeletexString apart from its own type id.
    __doc__ = TeletexString.__doc__

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class VideotexString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21)  # universal tag 21
    )
    # Payload is serialized as Latin-1.
    encoding = 'iso-8859-1'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class IA5String(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22)  # universal tag 22
    )
    # Payload is serialized as ASCII.
    encoding = 'us-ascii'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class GraphicString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25)  # universal tag 25
    )
    # Payload is serialized as Latin-1.
    encoding = 'iso-8859-1'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class VisibleString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26)  # universal tag 26
    )
    # Payload is serialized as ASCII.
    encoding = 'us-ascii'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class ISO646String(VisibleString):
    # Alias type: identical to VisibleString apart from its own type id.
    __doc__ = VisibleString.__doc__

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
class GeneralString(AbstractCharacterString):
    # ASN.1 GeneralString (universal tag 27).
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27)
    )
    # Codec used to (de)serialise the text payload.
    encoding = 'iso-8859-1'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class UniversalString(AbstractCharacterString):
    # ASN.1 UniversalString: full UCS-4 repertoire (universal tag 28).
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28)
    )
    # Codec used to (de)serialise the text payload.
    encoding = "utf-32-be"

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class BMPString(AbstractCharacterString):
    # ASN.1 BMPString: UCS-2 / Basic Multilingual Plane (universal tag 30).
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30)
    )
    # Codec used to (de)serialise the text payload.
    encoding = "utf-16-be"

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class UTF8String(AbstractCharacterString):
    # ASN.1 UTF8String (universal tag 12).
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
    )
    # Codec used to (de)serialise the text payload.
    encoding = "utf-8"

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
756
lib/pyasn1/type/constraint.py
Normal file
756
lib/pyasn1/type/constraint.py
Normal file
@@ -0,0 +1,756 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
# Original concept and code by Mike C. Fletcher.
|
||||
#
|
||||
import sys
|
||||
|
||||
from pyasn1.type import error
|
||||
|
||||
__all__ = ['SingleValueConstraint', 'ContainedSubtypeConstraint',
|
||||
'ValueRangeConstraint', 'ValueSizeConstraint',
|
||||
'PermittedAlphabetConstraint', 'InnerTypeConstraint',
|
||||
'ConstraintsExclusion', 'ConstraintsIntersection',
|
||||
'ConstraintsUnion']
|
||||
|
||||
|
||||
class AbstractConstraint(object):
    # Abstract base for all ASN.1 constraint objects.  Subclasses implement
    # _testValue() and may override _setValues().  Instances are immutable.

    def __init__(self, *values):
        self._valueMap = set()
        self._setValues(values)
        # Constraints never change after construction, so precompute the hash.
        self.__hash = hash((self.__class__.__name__, self._values))

    def __call__(self, value, idx=None):
        # An empty constraint permits any value.
        if not self._values:
            return

        try:
            self._testValue(value, idx)

        except error.ValueConstraintError:
            # Re-raise, naming the failing constraint and the cause.
            raise error.ValueConstraintError(
                '%s failed at: %r' % (self, sys.exc_info()[1])
            )

    def __repr__(self):
        representation = '%s object' % (self.__class__.__name__)

        if self._values:
            representation += ', consts %s' % ', '.join(
                [repr(x) for x in self._values])

        return '<%s>' % representation

    def __eq__(self, other):
        # Identity short-circuits to True; otherwise compare value tuples.
        return self is other and True or self._values == other

    def __ne__(self, other):
        return self._values != other

    def __lt__(self, other):
        return self._values < other

    def __le__(self, other):
        return self._values <= other

    def __gt__(self, other):
        return self._values > other

    def __ge__(self, other):
        return self._values >= other

    # Truthiness reflects whether the constraint carries any values.
    if sys.version_info[0] <= 2:
        def __nonzero__(self):
            return self._values and True or False
    else:
        def __bool__(self):
            return self._values and True or False

    def __hash__(self):
        return self.__hash

    def _setValues(self, values):
        self._values = values

    def _testValue(self, value, idx):
        # Default rejects everything; concrete subclasses override.
        raise error.ValueConstraintError(value)

    # Constraints derivation logic
    def getValueMap(self):
        return self._valueMap

    def isSuperTypeOf(self, otherConstraint):
        # TODO: fix possible comparison of set vs scalars here
        return (otherConstraint is self or
                not self._values or
                otherConstraint == self or
                self in otherConstraint.getValueMap())

    def isSubTypeOf(self, otherConstraint):
        return (otherConstraint is self or
                not self or
                otherConstraint == self or
                otherConstraint in self._valueMap)
|
||||
|
||||
|
||||
class SingleValueConstraint(AbstractConstraint):
    """Create a SingleValueConstraint object.

    The SingleValueConstraint satisfies any value that
    is present in the set of permitted values.

    Objects of this type are iterable (emitting constraint values) and
    can act as operands for some arithmetic operations e.g. addition
    and subtraction. The latter can be used for combining multiple
    SingleValueConstraint objects into one.

    The SingleValueConstraint object can be applied to
    any ASN.1 type.

    Parameters
    ----------
    *values: :class:`int`
        Full set of values permitted by this constraint object.

    Examples
    --------
    .. code-block:: python

        class DivisorOfSix(Integer):
            '''
            ASN.1 specification:

            Divisor-Of-6 ::= INTEGER (1 | 2 | 3 | 6)
            '''
            subtypeSpec = SingleValueConstraint(1, 2, 3, 6)

        # this will succeed
        divisor_of_six = DivisorOfSix(1)

        # this will raise ValueConstraintError
        divisor_of_six = DivisorOfSix(7)
    """
    def _setValues(self, values):
        self._values = values
        # Duplicate the values into a set for O(1) membership tests.
        self._set = set(values)

    def _testValue(self, value, idx):
        if value not in self._set:
            raise error.ValueConstraintError(value)

    # Constraints can be merged or reduced

    def __contains__(self, item):
        return item in self._set

    def __iter__(self):
        return iter(self._set)

    def __add__(self, constraint):
        return self.__class__(*(self._set.union(constraint)))

    # NOTE(fix): the original defined __sub__ twice with identical bodies;
    # the redundant duplicate has been removed.
    def __sub__(self, constraint):
        return self.__class__(*(self._set.difference(constraint)))
|
||||
|
||||
|
||||
class ContainedSubtypeConstraint(AbstractConstraint):
    """Create a ContainedSubtypeConstraint object.

    The ContainedSubtypeConstraint satisfies any value that
    is present in the set of permitted values and also
    satisfies included constraints.

    The ContainedSubtypeConstraint object can be applied to
    any ASN.1 type.

    Parameters
    ----------
    *values:
        Full set of values and constraint objects permitted
        by this constraint object.

    Examples
    --------
    .. code-block:: python

        class DivisorOfEighteen(Integer):
            '''
            ASN.1 specification:

            Divisors-of-18 ::= INTEGER (INCLUDES Divisors-of-6 | 9 | 18)
            '''
            subtypeSpec = ContainedSubtypeConstraint(
                SingleValueConstraint(1, 2, 3, 6), 9, 18
            )

        # this will succeed
        divisor_of_eighteen = DivisorOfEighteen(9)

        # this will raise ValueConstraintError
        divisor_of_eighteen = DivisorOfEighteen(10)
    """
    def _setValues(self, values):
        # FIX: _testValue() consults self._set, but this class never built
        # it (only SingleValueConstraint does), so the presence of any
        # scalar permitted value raised AttributeError at test time.
        # Collect the scalar (non-constraint) values into a set here.
        self._set = set(
            value for value in values
            if not isinstance(value, AbstractConstraint))
        AbstractConstraint._setValues(self, values)

    def _testValue(self, value, idx):
        # Nested constraint objects must all pass; scalar values are
        # treated as a plain permitted-value set.
        for constraint in self._values:
            if isinstance(constraint, AbstractConstraint):
                constraint(value, idx)
            elif value not in self._set:
                raise error.ValueConstraintError(value)
|
||||
|
||||
|
||||
class ValueRangeConstraint(AbstractConstraint):
    """Create a ValueRangeConstraint object.

    The ValueRangeConstraint satisfies any value that
    falls in the range of permitted values.

    The ValueRangeConstraint object can only be applied
    to :class:`~pyasn1.type.univ.Integer` and
    :class:`~pyasn1.type.univ.Real` types.

    Parameters
    ----------
    start: :class:`int`
        Minimum permitted value in the range (inclusive)

    end: :class:`int`
        Maximum permitted value in the range (inclusive)

    Examples
    --------
    .. code-block:: python

        class TeenAgeYears(Integer):
            '''
            ASN.1 specification:

            TeenAgeYears ::= INTEGER (13 .. 19)
            '''
            subtypeSpec = ValueRangeConstraint(13, 19)

        # this will succeed
        teen_year = TeenAgeYears(18)

        # this will raise ValueConstraintError
        teen_year = TeenAgeYears(20)
    """
    def _testValue(self, value, idx):
        # Both bounds are inclusive.
        if value < self.start or value > self.stop:
            raise error.ValueConstraintError(value)

    def _setValues(self, values):
        # Exactly two values (start, stop) are expected, in order.
        if len(values) != 2:
            raise error.PyAsn1Error(
                '%s: bad constraint values' % (self.__class__.__name__,)
            )
        self.start, self.stop = values
        if self.start > self.stop:
            raise error.PyAsn1Error(
                '%s: screwed constraint values (start > stop): %s > %s' % (
                    self.__class__.__name__,
                    self.start, self.stop
                )
            )
        AbstractConstraint._setValues(self, values)
|
||||
|
||||
|
||||
class ValueSizeConstraint(ValueRangeConstraint):
    """Create a ValueSizeConstraint object.

    The ValueSizeConstraint satisfies any value for
    as long as its size falls within the range of
    permitted sizes.

    The ValueSizeConstraint object can be applied
    to :class:`~pyasn1.type.univ.BitString`,
    :class:`~pyasn1.type.univ.OctetString` (including
    all :ref:`character ASN.1 types <type.char>`),
    :class:`~pyasn1.type.univ.SequenceOf`
    and :class:`~pyasn1.type.univ.SetOf` types.

    Parameters
    ----------
    minimum: :class:`int`
        Minimum permitted size of the value (inclusive)

    maximum: :class:`int`
        Maximum permitted size of the value (inclusive)

    Examples
    --------
    .. code-block:: python

        class BaseballTeamRoster(SetOf):
            '''
            ASN.1 specification:

            BaseballTeamRoster ::= SET SIZE (1..25) OF PlayerNames
            '''
            componentType = PlayerNames()
            subtypeSpec = ValueSizeConstraint(1, 25)

        # this will succeed
        team = BaseballTeamRoster()
        team.extend(['Jan', 'Matej'])
        encode(team)

        # this will raise ValueConstraintError
        team = BaseballTeamRoster()
        team.extend(['Jan'] * 26)
        encode(team)

    Note
    ----
    Whenever ValueSizeConstraint is applied to mutable types
    (e.g. :class:`~pyasn1.type.univ.SequenceOf`,
    :class:`~pyasn1.type.univ.SetOf`), constraint
    validation only happens at the serialisation phase rather
    than schema instantiation phase (as it is with immutable
    types).
    """
    def _testValue(self, value, idx):
        # Reuses the inherited start/stop bounds, but tests len(value)
        # rather than the value itself.
        valueSize = len(value)
        if valueSize < self.start or valueSize > self.stop:
            raise error.ValueConstraintError(value)
|
||||
|
||||
|
||||
class PermittedAlphabetConstraint(SingleValueConstraint):
    """Create a PermittedAlphabetConstraint object.

    The PermittedAlphabetConstraint satisfies any character
    string for as long as all its characters are present in
    the set of permitted characters.

    Objects of this type are iterable (emitting constraint values) and
    can act as operands for some arithmetic operations e.g. addition
    and subtraction.

    The PermittedAlphabetConstraint object can only be applied
    to the :ref:`character ASN.1 types <type.char>` such as
    :class:`~pyasn1.type.char.IA5String`.

    Parameters
    ----------
    *alphabet: :class:`str`
        Full set of characters permitted by this constraint object.

    Example
    -------
    .. code-block:: python

        class BooleanValue(IA5String):
            '''
            ASN.1 specification:

            BooleanValue ::= IA5String (FROM ('T' | 'F'))
            '''
            subtypeSpec = PermittedAlphabetConstraint('T', 'F')

        # this will succeed
        truth = BooleanValue('T')
        truth = BooleanValue('TF')

        # this will raise ValueConstraintError
        garbage = BooleanValue('TAF')

    ASN.1 `FROM ... EXCEPT ...` clause can be modelled by combining multiple
    PermittedAlphabetConstraint objects into one:

    Example
    -------
    .. code-block:: python

        class Lipogramme(IA5String):
            '''
            ASN.1 specification:

            Lipogramme ::=
                IA5String (FROM (ALL EXCEPT ("e"|"E")))
            '''
            subtypeSpec = (
                PermittedAlphabetConstraint(*string.printable) -
                PermittedAlphabetConstraint('e', 'E')
            )

        # this will succeed
        lipogramme = Lipogramme('A work of fiction?')

        # this will raise ValueConstraintError
        lipogramme = Lipogramme('Eel')

    Note
    ----
    Although `ConstraintsExclusion` object could seemingly be used for this
    purpose, practically, for it to work, it needs to represent its operand
    constraints as sets and intersect one with the other. That would require
    the insight into the constraint values (and their types) that are otherwise
    hidden inside the constraint object.

    Therefore it's more practical to model `EXCEPT` clause at
    `PermittedAlphabetConstraint` level instead.
    """
    def _setValues(self, values):
        self._values = values
        # Permitted characters as a set for fast superset tests.
        self._set = set(values)

    def _testValue(self, value, idx):
        # Every character of the candidate string must be permitted.
        if not self._set.issuperset(value):
            raise error.ValueConstraintError(value)
|
||||
|
||||
|
||||
class ComponentPresentConstraint(AbstractConstraint):
    """Create a ComponentPresentConstraint object.

    The ComponentPresentConstraint is only satisfied when the value
    is not `None`.

    The ComponentPresentConstraint object is typically used with
    `WithComponentsConstraint`.

    Examples
    --------
    .. code-block:: python

        present = ComponentPresentConstraint()

        # this will succeed
        present('whatever')

        # this will raise ValueConstraintError
        present(None)
    """
    def _setValues(self, values):
        # A non-empty _values tuple keeps the constraint "active" (see
        # AbstractConstraint.__call__); user arguments are not accepted.
        self._values = ('<must be present>',)

        if values:
            raise error.PyAsn1Error('No arguments expected')

    def _testValue(self, value, idx):
        if value is None:
            # FIX: the original message ended with a dangling colon
            # ('Component is not present:').
            raise error.ValueConstraintError('Component is not present')
|
||||
|
||||
|
||||
class ComponentAbsentConstraint(AbstractConstraint):
    """Create a ComponentAbsentConstraint object.

    The ComponentAbsentConstraint is only satisfied when the value
    is `None`.

    The ComponentAbsentConstraint object is typically used with
    `WithComponentsConstraint`.

    Examples
    --------
    .. code-block:: python

        absent = ComponentAbsentConstraint()

        # this will succeed
        absent(None)

        # this will raise ValueConstraintError
        absent('whatever')
    """
    def _setValues(self, values):
        # A non-empty _values tuple keeps the constraint "active" (see
        # AbstractConstraint.__call__); user arguments are not accepted.
        self._values = ('<must be absent>',)

        if values:
            raise error.PyAsn1Error('No arguments expected')

    def _testValue(self, value, idx):
        if value is not None:
            raise error.ValueConstraintError(
                'Component is not absent: %r' % value)
|
||||
|
||||
|
||||
class WithComponentsConstraint(AbstractConstraint):
    """Create a WithComponentsConstraint object.

    The `WithComponentsConstraint` satisfies any mapping object that has
    constrained fields present or absent, what is indicated by
    `ComponentPresentConstraint` and `ComponentAbsentConstraint`
    objects respectively.

    The `WithComponentsConstraint` object is typically applied
    to  :class:`~pyasn1.type.univ.Set` or
    :class:`~pyasn1.type.univ.Sequence` types.

    Parameters
    ----------
    *fields: :class:`tuple`
        Zero or more tuples of (`field`, `constraint`) indicating constrained
        fields.

    Notes
    -----
    On top of the primary use of `WithComponentsConstraint` (ensuring presence
    or absence of particular components of a :class:`~pyasn1.type.univ.Set` or
    :class:`~pyasn1.type.univ.Sequence`), it is also possible to pass any other
    constraint objects or their combinations. In case of scalar fields, these
    constraints will be verified in addition to the constraints belonging to
    scalar components themselves. However, formally, these additional
    constraints do not change the type of these ASN.1 objects.

    Examples
    --------

    .. code-block:: python

        class Item(Sequence):  #  Set is similar
            '''
            ASN.1 specification:

            Item ::= SEQUENCE {
                id    INTEGER OPTIONAL,
                name  OCTET STRING OPTIONAL
            } WITH COMPONENTS id PRESENT, name ABSENT | id ABSENT, name PRESENT
            '''
            componentType = NamedTypes(
                OptionalNamedType('id', Integer()),
                OptionalNamedType('name', OctetString())
            )
            withComponents = ConstraintsUnion(
                WithComponentsConstraint(
                    ('id', ComponentPresentConstraint()),
                    ('name', ComponentAbsentConstraint())
                ),
                WithComponentsConstraint(
                    ('id', ComponentAbsentConstraint()),
                    ('name', ComponentPresentConstraint())
                )
            )

        item = Item()

        # This will succeed
        item['id'] = 1

        # This will succeed
        item.reset()
        item['name'] = 'John'

        # This will fail (on encoding)
        item.reset()
        item['id'] = 1
        item['name'] = 'John'
    """
    def _testValue(self, value, idx):
        # `value` is expected to be a mapping; each constrained field is
        # fetched with .get() (missing -> None) and handed to its constraint.
        for field, constraint in self._values:
            constraint(value.get(field))

    def _setValues(self, values):
        AbstractConstraint._setValues(self, values)
|
||||
|
||||
|
||||
# This is a bit kludgy, meaning two op modes within a single constraint
|
||||
class InnerTypeConstraint(AbstractConstraint):
    """Value must satisfy the type and presence constraints"""

    # Two operating modes, chosen by what was passed to the constructor:
    # a single constraint object applies to every component, while
    # (idx, constraint, status) tuples constrain components individually.

    def _testValue(self, value, idx):
        if self.__singleTypeConstraint:
            self.__singleTypeConstraint(value)
        elif self.__multipleTypeConstraint:
            if idx not in self.__multipleTypeConstraint:
                raise error.ValueConstraintError(value)
            constraint, status = self.__multipleTypeConstraint[idx]
            if status == 'ABSENT':  # XXX presence is not checked!
                raise error.ValueConstraintError(value)
            constraint(value)

    def _setValues(self, values):
        self.__multipleTypeConstraint = {}
        self.__singleTypeConstraint = None
        for v in values:
            # Tuples select per-component mode: (idx, constraint, status).
            if isinstance(v, tuple):
                self.__multipleTypeConstraint[v[0]] = v[1], v[2]
            else:
                self.__singleTypeConstraint = v
        AbstractConstraint._setValues(self, values)
|
||||
|
||||
|
||||
# Logic operations on constraints
|
||||
|
||||
class ConstraintsExclusion(AbstractConstraint):
    """Create a ConstraintsExclusion logic operator object.

    The ConstraintsExclusion logic operator succeeds when the
    value does *not* satisfy the operand constraint.

    The ConstraintsExclusion object can be applied to
    any constraint and logic operator object.

    Parameters
    ----------
    *constraints:
        Constraint or logic operator objects.

    Examples
    --------
    .. code-block:: python

        class LuckyNumber(Integer):
            subtypeSpec = ConstraintsExclusion(
                SingleValueConstraint(13)
            )

        # this will succeed
        luckyNumber = LuckyNumber(12)

        # this will raise ValueConstraintError
        luckyNumber = LuckyNumber(13)

    Note
    ----
    The `FROM ... EXCEPT ...` ASN.1 clause should be modeled by combining
    constraint objects into one. See `PermittedAlphabetConstraint` for more
    information.
    """
    def _testValue(self, value, idx):
        # Each operand must *fail*; any operand that accepts the value
        # makes the exclusion fail.
        for constraint in self._values:
            try:
                constraint(value, idx)

            except error.ValueConstraintError:
                continue

            raise error.ValueConstraintError(value)

    def _setValues(self, values):
        AbstractConstraint._setValues(self, values)
|
||||
|
||||
|
||||
class AbstractConstraintSet(AbstractConstraint):
    # Base for constraint combinations (intersection/union).  Duck-types an
    # immutable tuple of operand constraints.

    def __getitem__(self, idx):
        return self._values[idx]

    def __iter__(self):
        return iter(self._values)

    def __add__(self, value):
        return self.__class__(*(self._values + (value,)))

    def __radd__(self, value):
        return self.__class__(*((value,) + self._values))

    def __len__(self):
        return len(self._values)

    # Constraints inclusion in sets

    def _setValues(self, values):
        self._values = values
        # Record operands (and their own operands, recursively) so that
        # isSuperTypeOf/isSubTypeOf derivation checks can consult them.
        for constraint in values:
            if constraint:
                self._valueMap.add(constraint)
                self._valueMap.update(constraint.getValueMap())
|
||||
|
||||
|
||||
class ConstraintsIntersection(AbstractConstraintSet):
    """Create a ConstraintsIntersection logic operator object.

    The ConstraintsIntersection logic operator only succeeds
    if *all* its operands succeed.

    The ConstraintsIntersection object can be applied to
    any constraint and logic operator objects.

    The ConstraintsIntersection object duck-types the immutable
    container object like Python :py:class:`tuple`.

    Parameters
    ----------
    *constraints:
        Constraint or logic operator objects.

    Examples
    --------
    .. code-block:: python

        class CapitalAndSmall(IA5String):
            '''
            ASN.1 specification:

            CapitalAndSmall ::=
                IA5String (FROM ("A".."Z"|"a".."z"))
            '''
            subtypeSpec = ConstraintsIntersection(
                PermittedAlphabetConstraint('A', 'Z'),
                PermittedAlphabetConstraint('a', 'z')
            )

        # this will succeed
        capital_and_small = CapitalAndSmall('Hello')

        # this will raise ValueConstraintError
        capital_and_small = CapitalAndSmall('hello')
    """
    def _testValue(self, value, idx):
        # Every operand must accept the value; the first failure propagates.
        for constraint in self._values:
            constraint(value, idx)
|
||||
|
||||
|
||||
class ConstraintsUnion(AbstractConstraintSet):
    """Create a ConstraintsUnion logic operator object.

    The ConstraintsUnion logic operator succeeds if
    *at least* a single operand succeeds.

    The ConstraintsUnion object can be applied to
    any constraint and logic operator objects.

    The ConstraintsUnion object duck-types the immutable
    container object like Python :py:class:`tuple`.

    Parameters
    ----------
    *constraints:
        Constraint or logic operator objects.

    Examples
    --------
    .. code-block:: python

        class CapitalOrSmall(IA5String):
            '''
            ASN.1 specification:

            CapitalOrSmall ::=
                IA5String (FROM ("A".."Z") | FROM ("a".."z"))
            '''
            subtypeSpec = ConstraintsUnion(
                PermittedAlphabetConstraint('A', 'Z'),
                PermittedAlphabetConstraint('a', 'z')
            )

        # this will succeed
        capital_or_small = CapitalOrSmall('Hello')

        # this will raise ValueConstraintError
        capital_or_small = CapitalOrSmall('hello!')
    """
    def _testValue(self, value, idx):
        # First accepting operand wins; only if all fail is the union failed.
        for constraint in self._values:
            try:
                constraint(value, idx)
            except error.ValueConstraintError:
                pass
            else:
                return

        raise error.ValueConstraintError(
            'all of %s failed for "%s"' % (self._values, value)
        )
|
||||
|
||||
# TODO:
|
||||
# refactor InnerTypeConstraint
|
||||
# add tests for type check
|
||||
# implement other constraint types
|
||||
# make constraint validation easy to skip
|
||||
11
lib/pyasn1/type/error.py
Normal file
11
lib/pyasn1/type/error.py
Normal file
@@ -0,0 +1,11 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1.error import PyAsn1Error
|
||||
|
||||
|
||||
class ValueConstraintError(PyAsn1Error):
    # Raised when a value fails an ASN.1 subtype constraint check.
    pass
|
||||
561
lib/pyasn1/type/namedtype.py
Normal file
561
lib/pyasn1/type/namedtype.py
Normal file
@@ -0,0 +1,561 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import sys
|
||||
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import tagmap
|
||||
|
||||
__all__ = ['NamedType', 'OptionalNamedType', 'DefaultedNamedType',
|
||||
'NamedTypes']
|
||||
|
||||
try:
    # Probe for the any() built-in (absent prior to Python 2.5).
    any

except NameError:
    # Fallback for ancient interpreters only.
    # NOTE(review): on Python 3 filter() is lazy, so bool(filter(...)) would
    # always be True -- harmless here, since Python 3 always has any() and
    # this branch can never run there.
    any = lambda x: bool(filter(bool, x))
|
||||
|
||||
|
||||
class NamedType(object):
    """A single named field of a constructed ASN.1 type.

    A |NamedType| pairs a field *name* with its ASN.1 type object.
    Instances are immutable and duck-type a two-element Python
    :class:`tuple` of ``(name, asn1Object)``.

    Parameters
    ----------
    name: :py:class:`str`
        Field name

    asn1Object:
        ASN.1 type object
    """
    # Presence flags; overridden by the Optional/Defaulted subclasses.
    isOptional = False
    isDefaulted = False

    def __init__(self, name, asn1Object, openType=None):
        # Keep the (name, type) pair as one tuple -- it backs all the
        # tuple-duck-typing dunders below.
        self._pair = (name, asn1Object)
        self._open_type = openType

    def __repr__(self):
        field_name, field_type = self._pair
        details = '%s=%r' % (field_name, field_type)

        if self._open_type:
            details += ', open type %r' % self._open_type

        return '<%s object, type %s>' % (
            self.__class__.__name__, details)

    def __eq__(self, other):
        return self._pair == other

    def __ne__(self, other):
        return self._pair != other

    def __lt__(self, other):
        return self._pair < other

    def __le__(self, other):
        return self._pair <= other

    def __gt__(self, other):
        return self._pair > other

    def __ge__(self, other):
        return self._pair >= other

    def __hash__(self):
        return hash(self._pair)

    def __getitem__(self, idx):
        return self._pair[idx]

    def __iter__(self):
        return iter(self._pair)

    @property
    def name(self):
        return self._pair[0]

    @property
    def asn1Object(self):
        return self._pair[1]

    @property
    def openType(self):
        return self._open_type

    # Backward compatibility accessors

    def getName(self):
        return self.name

    def getType(self):
        return self.asn1Object
|
||||
|
||||
|
||||
class OptionalNamedType(NamedType):
    __doc__ = NamedType.__doc__

    # Field may legitimately be absent from the serialised substrate.
    isOptional = True
|
||||
|
||||
|
||||
class DefaultedNamedType(NamedType):
    __doc__ = NamedType.__doc__

    # Field carries a default and may be omitted on the wire.
    isDefaulted = True
|
||||
|
||||
|
||||
class NamedTypes(object):
|
||||
"""Create a collection of named fields for a constructed ASN.1 type.
|
||||
|
||||
The NamedTypes object represents a collection of named fields of a constructed ASN.1 type.
|
||||
|
||||
*NamedTypes* objects are immutable and duck-type Python :class:`dict` objects
|
||||
holding *name* as keys and ASN.1 type object as values.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
*namedTypes: :class:`~pyasn1.type.namedtype.NamedType`
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Description(Sequence):
|
||||
'''
|
||||
ASN.1 specification:
|
||||
|
||||
Description ::= SEQUENCE {
|
||||
surname IA5String,
|
||||
first-name IA5String OPTIONAL,
|
||||
age INTEGER DEFAULT 40
|
||||
}
|
||||
'''
|
||||
componentType = NamedTypes(
|
||||
NamedType('surname', IA5String()),
|
||||
OptionalNamedType('first-name', IA5String()),
|
||||
DefaultedNamedType('age', Integer(40))
|
||||
)
|
||||
|
||||
descr = Description()
|
||||
descr['surname'] = 'Smith'
|
||||
descr['first-name'] = 'John'
|
||||
"""
|
||||
def __init__(self, *namedTypes, **kwargs):
|
||||
self.__namedTypes = namedTypes
|
||||
self.__namedTypesLen = len(self.__namedTypes)
|
||||
self.__minTagSet = self.__computeMinTagSet()
|
||||
self.__nameToPosMap = self.__computeNameToPosMap()
|
||||
self.__tagToPosMap = self.__computeTagToPosMap()
|
||||
self.__ambiguousTypes = 'terminal' not in kwargs and self.__computeAmbiguousTypes() or {}
|
||||
self.__uniqueTagMap = self.__computeTagMaps(unique=True)
|
||||
self.__nonUniqueTagMap = self.__computeTagMaps(unique=False)
|
||||
self.__hasOptionalOrDefault = any([True for namedType in self.__namedTypes
|
||||
if namedType.isDefaulted or namedType.isOptional])
|
||||
self.__hasOpenTypes = any([True for namedType in self.__namedTypes
|
||||
if namedType.openType])
|
||||
|
||||
self.__requiredComponents = frozenset(
|
||||
[idx for idx, nt in enumerate(self.__namedTypes) if not nt.isOptional and not nt.isDefaulted]
|
||||
)
|
||||
self.__keys = frozenset([namedType.name for namedType in self.__namedTypes])
|
||||
self.__values = tuple([namedType.asn1Object for namedType in self.__namedTypes])
|
||||
self.__items = tuple([(namedType.name, namedType.asn1Object) for namedType in self.__namedTypes])
|
||||
|
||||
def __repr__(self):
    """Return a debug representation listing the contained types."""
    inner = ', '.join(repr(namedType) for namedType in self.__namedTypes)
    return '<%s object, types %s>' % (self.__class__.__name__, inner)

# Rich comparisons and hashing delegate to the underlying tuple
# of NamedType objects.

def __eq__(self, other):
    return self.__namedTypes == other

def __ne__(self, other):
    return self.__namedTypes != other

def __lt__(self, other):
    return self.__namedTypes < other

def __le__(self, other):
    return self.__namedTypes <= other

def __gt__(self, other):
    return self.__namedTypes > other

def __ge__(self, other):
    return self.__namedTypes >= other

def __hash__(self):
    return hash(self.__namedTypes)

def __getitem__(self, idx):
    # Integer index -> positional lookup; any non-integer key is
    # treated as a field name and resolved through the name map.
    try:
        return self.__namedTypes[idx]
    except TypeError:
        return self.__namedTypes[self.__nameToPosMap[idx]]

def __contains__(self, key):
    return key in self.__nameToPosMap

def __iter__(self):
    # Iterates field names (element 0 of each NamedType).
    return (namedType[0] for namedType in self.__namedTypes)

if sys.version_info[0] <= 2:
    def __nonzero__(self):
        return self.__namedTypesLen > 0
else:
    def __bool__(self):
        return self.__namedTypesLen > 0

def __len__(self):
    return self.__namedTypesLen
|
||||
|
||||
# Python dict protocol (served from views cached at construction time)

def values(self):
    """Return ASN.1 type objects in field order."""
    return self.__values

def keys(self):
    """Return the set of field names."""
    return self.__keys

def items(self):
    """Return (name, ASN.1 type) pairs in field order."""
    return self.__items

def clone(self):
    """Return a new instance built from the same NamedType objects."""
    return self.__class__(*self.__namedTypes)
|
||||
|
||||
class PostponedError(object):
    """Placeholder for a lookup table that failed to build.

    Construction errors are deferred: the message is stored here and
    only raised when the defective table is actually used.
    """

    def __init__(self, errorMsg):
        self.__errorMsg = errorMsg

    def __getitem__(self, item):
        # Any access surfaces the deferred construction failure.
        raise error.PyAsn1Error(self.__errorMsg)
|
||||
|
||||
def __computeTagToPosMap(self):
    """Build a TagSet -> field position map (or a PostponedError)."""
    tagToPosMap = {}

    for position, namedType in enumerate(self.__namedTypes):
        tagMap = namedType.asn1Object.tagMap
        if isinstance(tagMap, NamedTypes.PostponedError):
            # Propagate a deferred construction failure as-is.
            return tagMap
        if not tagMap:
            continue
        for tagSet in tagMap.presentTypes:
            if tagSet in tagToPosMap:
                return NamedTypes.PostponedError(
                    'Duplicate component tag %s at %s' % (tagSet, namedType))
            tagToPosMap[tagSet] = position

    return tagToPosMap
|
||||
|
||||
def __computeNameToPosMap(self):
    """Build a field name -> position map (or a PostponedError)."""
    nameToPosMap = {}

    for position, namedType in enumerate(self.__namedTypes):
        if namedType.name in nameToPosMap:
            return NamedTypes.PostponedError(
                'Duplicate component name %s at %s' % (namedType.name, namedType))
        nameToPosMap[namedType.name] = position

    return nameToPosMap
|
||||
|
||||
def __computeAmbiguousTypes(self):
    """Map each field position to the NamedTypes possibly found there.

    Walking the fields backwards, every optional/defaulted field
    extends the run of types that may appear at its position; a
    mandatory field resets the run to itself.
    """
    ambiguousTypes = {}
    trailingTypes = ()

    for position, namedType in reversed(tuple(enumerate(self.__namedTypes))):
        if namedType.isOptional or namedType.isDefaulted:
            trailingTypes = (namedType,) + trailingTypes
        else:
            trailingTypes = (namedType,)

        if len(trailingTypes) == len(self.__namedTypes):
            # Every field is in the run -- reuse self rather than rebuilding.
            ambiguousTypes[position] = self
        else:
            # terminal=True stops this sub-collection from recursing further.
            ambiguousTypes[position] = NamedTypes(*trailingTypes, terminal=True)

    return ambiguousTypes
|
||||
|
||||
def getTypeByPosition(self, idx):
    """Return ASN.1 type object at field position *idx*.

    Parameters
    ----------
    idx: :py:class:`int`
        Field index

    Returns
    -------
    :
        ASN.1 type

    Raises
    ------
    ~pyasn1.error.PyAsn1Error
        If *idx* is outside the fields range
    """
    # EAFP: negative indices are intentionally allowed to index from the end.
    try:
        return self.__namedTypes[idx].asn1Object
    except IndexError:
        raise error.PyAsn1Error('Type position out of range')
|
||||
|
||||
def getPositionByType(self, tagSet):
    """Return field position of the ASN.1 type identified by *tagSet*.

    Parameters
    ----------
    tagSet: :class:`~pyasn1.type.tag.TagSet`
        ASN.1 tag set distinguishing one ASN.1 type from others.

    Returns
    -------
    : :py:class:`int`
        ASN.1 type position in fields set

    Raises
    ------
    ~pyasn1.error.PyAsn1Error
        If *tagSet* is not present or ASN.1 types are not unique
    """
    try:
        return self.__tagToPosMap[tagSet]
    except KeyError:
        raise error.PyAsn1Error('Type %s not found' % (tagSet,))
|
||||
|
||||
def getNameByPosition(self, idx):
    """Return field name at position *idx*.

    Parameters
    ----------
    idx: :py:class:`int`
        Field index

    Returns
    -------
    : :py:class:`str`
        Field name

    Raises
    ------
    ~pyasn1.error.PyAsn1Error
        If *idx* is outside the fields range
    """
    try:
        return self.__namedTypes[idx].name
    except IndexError:
        raise error.PyAsn1Error('Type position out of range')
|
||||
|
||||
def getPositionByName(self, name):
    """Return field position by field name.

    Parameters
    ----------
    name: :py:class:`str`
        Field name

    Returns
    -------
    : :py:class:`int`
        Field position in fields set

    Raises
    ------
    ~pyasn1.error.PyAsn1Error
        If *name* is not present or not unique
    """
    try:
        return self.__nameToPosMap[name]
    except KeyError:
        raise error.PyAsn1Error('Name %s not found' % (name,))
|
||||
|
||||
def getTagMapNearPosition(self, idx):
    """Return a TagMap of ASN.1 types allowed at or past position *idx*.

    Some serialisations skip optional/defaulted fields and some
    constructed types allow field reordering, so several types can
    legitimately occur at one position.

    Parameters
    ----------
    idx: :py:class:`int`
        Field index

    Returns
    -------
    : :class:`~pyasn1.type.tagmap.TagMap`
        Map of ASN.1 types allowed at the given field position

    Raises
    ------
    ~pyasn1.error.PyAsn1Error
        If *idx* is outside the fields range
    """
    try:
        return self.__ambiguousTypes[idx].tagMap
    except KeyError:
        raise error.PyAsn1Error('Type position out of range')
|
||||
|
||||
def getPositionNearType(self, tagSet, idx):
    """Return the closest position at/past *idx* where *tagSet* may occur.

    Useful when recovering serialisations that skip optional fields
    or reorder them.

    Parameters
    ----------
    tagSet: :class:`~pyasn1.type.tag.TagSet`
        ASN.1 type whose field position to look up

    idx: :py:class:`int`
        Field position at or past which to perform the look up

    Returns
    -------
    : :py:class:`int`
        Field position in fields set

    Raises
    ------
    ~pyasn1.error.PyAsn1Error
        If *tagSet* is not allowed there or *idx* is out of range
    """
    try:
        # Offset is relative to the ambiguous sub-collection at idx.
        return idx + self.__ambiguousTypes[idx].getPositionByType(tagSet)
    except KeyError:
        raise error.PyAsn1Error('Type position out of range')
|
||||
|
||||
def __computeMinTagSet(self):
    """Return the smallest TagSet among member types (empty set if none)."""
    smallest = None

    for namedType in self.__namedTypes:
        asn1Object = namedType.asn1Object

        # Prefer the member's own minTagSet; fall back to its tagSet.
        try:
            candidate = asn1Object.minTagSet
        except AttributeError:
            candidate = asn1Object.tagSet

        if smallest is None or candidate < smallest:
            smallest = candidate

    return smallest or tag.TagSet()
|
||||
|
||||
@property
def minTagSet(self):
    """Return the minimal TagSet among ASN.1 types in this collection.

    Some ASN.1 types/serialisation protocols arrange fields by their
    numerical tag value; this precomputed property supports that.

    Returns
    -------
    : :class:`~pyasn1.type.tagset.TagSet`
        Minimal TagSet among ASN.1 types in callee *NamedTypes*
    """
    return self.__minTagSet
|
||||
|
||||
def __computeTagMaps(self, unique):
    """Merge the members' tag maps into a single TagMap.

    When *unique* is true, a duplicate TagSet yields a PostponedError
    instead of a map.
    """
    presentTypes = {}
    skipTypes = {}
    defaultType = None

    for namedType in self.__namedTypes:
        tagMap = namedType.asn1Object.tagMap
        if isinstance(tagMap, NamedTypes.PostponedError):
            # Propagate a deferred construction failure.
            return tagMap
        for tagSet in tagMap:
            if unique and tagSet in presentTypes:
                return NamedTypes.PostponedError(
                    'Non-unique tagSet %s of %s at %s' % (tagSet, namedType, self))
            presentTypes[tagSet] = namedType.asn1Object
        skipTypes.update(tagMap.skipTypes)

        # Only one member may contribute a default type.
        if defaultType is None:
            defaultType = tagMap.defaultType
        elif tagMap.defaultType is not None:
            return NamedTypes.PostponedError(
                'Duplicate default ASN.1 type at %s' % (self,))

    return tagmap.TagMap(presentTypes, skipTypes, defaultType)
|
||||
|
||||
@property
def tagMap(self):
    """Return a (non-unique) *TagMap* built from tags and types recursively.

    Combines tags from the *TagMap* objects of children types,
    associating each with its immediate child type; e.g. for
    ``OuterType ::= CHOICE { innerType INTEGER }`` the map contains
    ``Integer.tagSet -> Choice``.
    """
    return self.__nonUniqueTagMap
|
||||
|
||||
@property
def tagMapUnique(self):
    """Return a *TagMap* built from unique tags and types recursively.

    Combines tags from the *TagMap* objects of children types,
    associating each with its immediate child type; e.g. for
    ``OuterType ::= CHOICE { innerType INTEGER }`` the map contains
    ``Integer.tagSet -> Choice``.

    Note
    ----
    Duplicate *TagSet* objects found in the tree of children types
    would cause error.
    """
    return self.__uniqueTagMap
|
||||
|
||||
@property
def hasOptionalOrDefault(self):
    """Whether any field is OPTIONAL or DEFAULT (precomputed)."""
    return self.__hasOptionalOrDefault

@property
def hasOpenTypes(self):
    """Whether any field carries an open type (precomputed)."""
    return self.__hasOpenTypes

@property
def namedTypes(self):
    """Return the member NamedType objects as a fresh tuple."""
    return tuple(self.__namedTypes)

@property
def requiredComponents(self):
    """Return the frozenset of positions of mandatory fields."""
    return self.__requiredComponents
|
||||
192
lib/pyasn1/type/namedval.py
Normal file
192
lib/pyasn1/type/namedval.py
Normal file
@@ -0,0 +1,192 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
# ASN.1 named integers
|
||||
#
|
||||
from pyasn1 import error
|
||||
|
||||
__all__ = ['NamedValues']
|
||||
|
||||
|
||||
class NamedValues(object):
    """Create named values object.

    The |NamedValues| object represents a collection of string names
    associated with numeric IDs. These objects are used for giving
    names to otherwise numerical values.

    |NamedValues| objects are immutable and duck-type Python
    :class:`dict` object mapping ID to name and vice-versa.

    Parameters
    ----------
    *args: variable number of two-element :py:class:`tuple`

        name: :py:class:`str`
            Value label

        value: :py:class:`int`
            Numeric value

        Bare strings are also accepted and auto-numbered past the
        largest explicitly given value.

    Keyword Args
    ------------
    name: :py:class:`str`
        Value label

    value: :py:class:`int`
        Numeric value

    Examples
    --------

    .. code-block:: pycon

        >>> nv = NamedValues('a', 'b', ('c', 0), d=1)
        >>> nv
        >>> {'c': 0, 'd': 1, 'a': 2, 'b': 3}
        >>> nv[0]
        'c'
        >>> nv['a']
        2
    """
    def __init__(self, *args, **kwargs):
        self.__names = {}
        self.__numbers = {}

        # Bare names (no explicit number) are collected and numbered last.
        anonymousNames = []

        for namedValue in args:
            if isinstance(namedValue, (tuple, list)):
                try:
                    name, number = namedValue

                except ValueError:
                    raise error.PyAsn1Error('Not a proper attribute-value pair %r' % (namedValue,))

            else:
                anonymousNames.append(namedValue)
                continue

            if name in self.__names:
                raise error.PyAsn1Error('Duplicate name %s' % (name,))

            if number in self.__numbers:
                raise error.PyAsn1Error('Duplicate number %s=%s' % (name, number))

            self.__names[name] = number
            self.__numbers[number] = name

        for name, number in kwargs.items():
            if name in self.__names:
                raise error.PyAsn1Error('Duplicate name %s' % (name,))

            if number in self.__numbers:
                raise error.PyAsn1Error('Duplicate number %s=%s' % (name, number))

            self.__names[name] = number
            self.__numbers[number] = name

        if anonymousNames:
            # Continue numbering after the largest explicitly given value.
            number = max(self.__numbers) + 1 if self.__numbers else 0

            for name in anonymousNames:

                if name in self.__names:
                    raise error.PyAsn1Error('Duplicate name %s' % (name,))

                self.__names[name] = number
                self.__numbers[number] = name

                number += 1

    def __repr__(self):
        representation = ', '.join(['%s=%d' % x for x in self.items()])

        # Keep the repr short for large enumerations.
        if len(representation) > 64:
            representation = representation[:32] + '...' + representation[-32:]

        return '<%s object, enums %s>' % (
            self.__class__.__name__, representation)

    def __eq__(self, other):
        return dict(self) == other

    def __ne__(self, other):
        return dict(self) != other

    def __lt__(self, other):
        return dict(self) < other

    def __le__(self, other):
        return dict(self) <= other

    def __gt__(self, other):
        return dict(self) > other

    def __ge__(self, other):
        return dict(self) >= other

    def __hash__(self):
        # BUG FIX: the original hashed the *generator* returned by
        # items(), producing an id()-based value that differed between
        # calls even on the same object and never matched for equal
        # objects.  Hash an order-insensitive frozenset of the pairs
        # instead, consistent with __eq__.
        return hash(frozenset(self.__names.items()))

    # Python dict protocol (read-only)

    def __getitem__(self, key):
        # Numbers resolve to names; any other key is tried as a name.
        try:
            return self.__numbers[key]

        except KeyError:
            return self.__names[key]

    def __len__(self):
        return len(self.__names)

    def __contains__(self, key):
        return key in self.__names or key in self.__numbers

    def __iter__(self):
        return iter(self.__names)

    def values(self):
        return iter(self.__numbers)

    def keys(self):
        return iter(self.__names)

    def items(self):
        for name in self.__names:
            yield name, self.__names[name]

    # support merging

    def __add__(self, namedValues):
        return self.__class__(*tuple(self.items()) + tuple(namedValues.items()))

    # XXX clone/subtype?

    def clone(self, *args, **kwargs):
        new = self.__class__(*args, **kwargs)
        return self + new

    # legacy protocol

    def getName(self, value):
        """Return the name for *value*, or None if unknown."""
        if value in self.__numbers:
            return self.__numbers[value]

    def getValue(self, name):
        """Return the number for *name*, or None if unknown."""
        if name in self.__names:
            return self.__names[name]

    def getValues(self, *names):
        """Return the numbers for all *names*; raise on any unknown name."""
        try:
            return [self.__names[name] for name in names]

        except KeyError:
            raise error.PyAsn1Error(
                'Unknown bit identifier(s): %s' % (set(names).difference(self.__names),)
            )
|
||||
104
lib/pyasn1/type/opentype.py
Normal file
104
lib/pyasn1/type/opentype.py
Normal file
@@ -0,0 +1,104 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
|
||||
__all__ = ['OpenType']
|
||||
|
||||
|
||||
class OpenType(object):
    """Create ASN.1 type map indexed by a value

    Models an untyped field of a constructed ASN.1 type (`ANY DEFINED
    BY` for scalars, `SET OF` / `SEQUENCE OF ANY DEFINED BY` for
    containers), typically used with :class:`~pyasn1.type.univ.Any`.

    OpenType objects duck-type a read-only Python :class:`dict`.  The
    passed *typeMap* is stored by reference, not copied, so mutating
    it at run time is reflected in the *OpenType* object's behavior.

    Parameters
    ----------
    name: :py:class:`str`
        Field name

    typeMap: :py:class:`dict`
        A map of value->ASN.1 type. It's stored by reference and can
        be mutated later to register new mappings.

    Examples
    --------

    For untyped scalars:

    .. code-block:: python

        openType = OpenType(
            'id', {1: Integer(),
                   2: OctetString()}
        )
        Sequence(
            componentType=NamedTypes(
                NamedType('id', Integer()),
                NamedType('blob', Any(), openType=openType)
            )
        )

    For untyped `SET OF` or `SEQUENCE OF` vectors, attach the same
    *openType* to a `SetOf(componentType=Any())` field instead.
    """

    def __init__(self, name, typeMap=None):
        self.__name = name
        # Keep the caller's mapping by reference (documented behavior).
        self.__typeMap = {} if typeMap is None else typeMap

    @property
    def name(self):
        return self.__name

    # Read-only dict protocol, delegated to the underlying map.

    def values(self):
        return self.__typeMap.values()

    def keys(self):
        return self.__typeMap.keys()

    def items(self):
        return self.__typeMap.items()

    def __contains__(self, key):
        return key in self.__typeMap

    def __getitem__(self, key):
        return self.__typeMap[key]

    def __iter__(self):
        return iter(self.__typeMap)
|
||||
335
lib/pyasn1/type/tag.py
Normal file
335
lib/pyasn1/type/tag.py
Normal file
@@ -0,0 +1,335 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import error
|
||||
|
||||
__all__ = ['tagClassUniversal', 'tagClassApplication', 'tagClassContext',
|
||||
'tagClassPrivate', 'tagFormatSimple', 'tagFormatConstructed',
|
||||
'tagCategoryImplicit', 'tagCategoryExplicit',
|
||||
'tagCategoryUntagged', 'Tag', 'TagSet']
|
||||
|
||||
#: Identifier for ASN.1 class UNIVERSAL
|
||||
tagClassUniversal = 0x00
|
||||
|
||||
#: Identifier for ASN.1 class APPLICATION
|
||||
tagClassApplication = 0x40
|
||||
|
||||
#: Identifier for ASN.1 class context-specific
|
||||
tagClassContext = 0x80
|
||||
|
||||
#: Identifier for ASN.1 class private
|
||||
tagClassPrivate = 0xC0
|
||||
|
||||
#: Identifier for "simple" ASN.1 structure (e.g. scalar)
|
||||
tagFormatSimple = 0x00
|
||||
|
||||
#: Identifier for "constructed" ASN.1 structure (e.g. may have inner components)
|
||||
tagFormatConstructed = 0x20
|
||||
|
||||
tagCategoryImplicit = 0x01
|
||||
tagCategoryExplicit = 0x02
|
||||
tagCategoryUntagged = 0x04
|
||||
|
||||
|
||||
class Tag(object):
    """Create ASN.1 tag

    An immutable (tagClass, tagFormat, tagId) triple attached to ASN.1
    types to make them distinguishable from each other.  *Tag* objects
    duck-type three-element Python :class:`tuple` objects.

    Parameters
    ----------
    tagClass: :py:class:`int`
        Tag *class* value

    tagFormat: :py:class:`int`
        Tag *format* value

    tagId: :py:class:`int`
        Tag ID value
    """

    def __init__(self, tagClass, tagFormat, tagId):
        if tagId < 0:
            raise error.PyAsn1Error('Negative tag ID (%s) not allowed' % tagId)
        self.__tagClass = tagClass
        self.__tagFormat = tagFormat
        self.__tagId = tagId
        # Comparison and hashing deliberately ignore tagFormat.
        self.__tagClassId = tagClass, tagId
        self.__hash = hash(self.__tagClassId)

    def __repr__(self):
        representation = '[%s:%s:%s]' % (
            self.__tagClass, self.__tagFormat, self.__tagId)
        return '<%s object, tag %s>' % (
            self.__class__.__name__, representation)

    def __eq__(self, other):
        return self.__tagClassId == other

    def __ne__(self, other):
        return self.__tagClassId != other

    def __lt__(self, other):
        return self.__tagClassId < other

    def __le__(self, other):
        return self.__tagClassId <= other

    def __gt__(self, other):
        return self.__tagClassId > other

    def __ge__(self, other):
        return self.__tagClassId >= other

    def __hash__(self):
        return self.__hash

    def __getitem__(self, idx):
        # Only the exact indices 0, 1, 2 are supported (no negatives,
        # no slices) -- anything else is out of range.
        if idx == 0:
            return self.__tagClass
        if idx == 1:
            return self.__tagFormat
        if idx == 2:
            return self.__tagId
        raise IndexError()

    def __iter__(self):
        return iter((self.__tagClass, self.__tagFormat, self.__tagId))

    # Bitwise combination operates component-wise.

    def __and__(self, otherTag):
        return self.__class__(self.__tagClass & otherTag.tagClass,
                              self.__tagFormat & otherTag.tagFormat,
                              self.__tagId & otherTag.tagId)

    def __or__(self, otherTag):
        return self.__class__(self.__tagClass | otherTag.tagClass,
                              self.__tagFormat | otherTag.tagFormat,
                              self.__tagId | otherTag.tagId)

    @property
    def tagClass(self):
        """ASN.1 tag class (:py:class:`int`)."""
        return self.__tagClass

    @property
    def tagFormat(self):
        """ASN.1 tag format (:py:class:`int`)."""
        return self.__tagFormat

    @property
    def tagId(self):
        """ASN.1 tag ID (:py:class:`int`)."""
        return self.__tagId
|
||||
|
||||
|
||||
class TagSet(object):
    """Create a collection of ASN.1 tags

    Represents a combination of :class:`~pyasn1.type.tag.Tag` objects
    attached to an ASN.1 type to make types distinguishable from each
    other.  *TagSet* objects are immutable and duck-type Python
    :class:`tuple` objects holding any number of *Tag* objects.

    Parameters
    ----------
    baseTag: :class:`~pyasn1.type.tag.Tag`
        Base *Tag* object. This tag survives IMPLICIT tagging.

    *superTags: :class:`~pyasn1.type.tag.Tag`
        Additional *Tag* objects taking part in subtyping.

    Examples
    --------
    .. code-block:: python

        class OrderNumber(NumericString):
            '''
            Order-number ::=
                [APPLICATION 5] IMPLICIT NumericString
            '''
            tagSet = NumericString.tagSet.tagImplicitly(
                Tag(tagClassApplication, tagFormatSimple, 5)
            )

        orderNumber = OrderNumber('1234')
    """

    def __init__(self, baseTag=(), *superTags):
        self.__baseTag = baseTag
        self.__superTags = superTags
        # Comparison/hash key: (class, id) pairs -- format is ignored.
        self.__superTagsClassId = tuple(
            (superTag.tagClass, superTag.tagId) for superTag in superTags)
        self.__lenOfSuperTags = len(superTags)
        self.__hash = hash(self.__superTagsClassId)

    def __repr__(self):
        if self.__superTags:
            body = 'tags ' + '-'.join(
                '%s:%s:%s' % (x.tagClass, x.tagFormat, x.tagId)
                for x in self.__superTags)
        else:
            body = 'untagged'

        return '<%s object, %s>' % (self.__class__.__name__, body)

    def __add__(self, superTag):
        return self.__class__(self.__baseTag, *self.__superTags + (superTag,))

    def __radd__(self, superTag):
        return self.__class__(self.__baseTag, *(superTag,) + self.__superTags)

    def __getitem__(self, i):
        # Slicing yields a new TagSet; integer indexing yields a Tag.
        if i.__class__ is slice:
            return self.__class__(self.__baseTag, *self.__superTags[i])
        return self.__superTags[i]

    def __eq__(self, other):
        return self.__superTagsClassId == other

    def __ne__(self, other):
        return self.__superTagsClassId != other

    def __lt__(self, other):
        return self.__superTagsClassId < other

    def __le__(self, other):
        return self.__superTagsClassId <= other

    def __gt__(self, other):
        return self.__superTagsClassId > other

    def __ge__(self, other):
        return self.__superTagsClassId >= other

    def __hash__(self):
        return self.__hash

    def __len__(self):
        return self.__lenOfSuperTags

    @property
    def baseTag(self):
        """Return the base :class:`~pyasn1.type.tag.Tag` of this *TagSet*."""
        return self.__baseTag

    @property
    def superTags(self):
        """Return the tuple of :class:`~pyasn1.type.tag.Tag` objects held."""
        return self.__superTags

    def tagExplicitly(self, superTag):
        """Return explicitly tagged *TagSet*

        With explicit tagging mode, new tags are *appended* to the
        existing tag(s).

        Parameters
        ----------
        superTag: :class:`~pyasn1.type.tag.Tag`
            *Tag* object to tag this *TagSet*

        Returns
        -------
        : :class:`~pyasn1.type.tag.TagSet`
            New *TagSet* object
        """
        if superTag.tagClass == tagClassUniversal:
            raise error.PyAsn1Error("Can't tag with UNIVERSAL class tag")
        # An explicit wrapper is always a constructed encoding.
        if superTag.tagFormat != tagFormatConstructed:
            superTag = Tag(superTag.tagClass, tagFormatConstructed, superTag.tagId)
        return self + superTag

    def tagImplicitly(self, superTag):
        """Return implicitly tagged *TagSet*

        With implicit tagging mode, the new tag *replaces* the last
        existing tag.

        Parameters
        ----------
        superTag: :class:`~pyasn1.type.tag.Tag`
            *Tag* object to tag this *TagSet*

        Returns
        -------
        : :class:`~pyasn1.type.tag.TagSet`
            New *TagSet* object
        """
        if self.__superTags:
            # Preserve the format (constructed/simple) of the replaced tag.
            superTag = Tag(superTag.tagClass, self.__superTags[-1].tagFormat, superTag.tagId)
        return self[:-1] + superTag

    def isSuperTagSetOf(self, tagSet):
        """Test type relationship against given *TagSet*

        The callee is a supertype of *tagSet* tag-wise when all of the
        callee's tags appear, in order, at the front of *tagSet*.

        Parameters
        ----------
        tagSet: :class:`~pyasn1.type.tag.TagSet`
            *TagSet* object to evaluate against the callee

        Returns
        -------
        : :py:class:`bool`
            :obj:`True` if callee is a supertype of *tagSet*
        """
        if len(tagSet) < self.__lenOfSuperTags:
            return False
        return self.__superTags == tagSet[:self.__lenOfSuperTags]

    # Backward compatibility

    def getBaseTag(self):
        return self.__baseTag
|
||||
|
||||
def initTagSet(tag):
    """Return a TagSet seeded with *tag* as both base and first super tag."""
    return TagSet(tag, tag)
|
||||
96
lib/pyasn1/type/tagmap.py
Normal file
96
lib/pyasn1/type/tagmap.py
Normal file
@@ -0,0 +1,96 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import error
|
||||
|
||||
__all__ = ['TagMap']
|
||||
|
||||
|
||||
class TagMap(object):
    """Map *TagSet* objects to ASN.1 types

    *TagMap* objects are immutable and duck-type read-only Python
    :class:`dict` objects holding *TagSet* objects as keys and ASN.1
    type objects as values.

    Parameters
    ----------
    presentTypes: :py:class:`dict`
        Map of :class:`~pyasn1.type.tag.TagSet` to ASN.1 objects
        considered as being unconditionally present in the *TagMap*.

    skipTypes: :py:class:`dict`
        A collection of :class:`~pyasn1.type.tag.TagSet` objects
        considered as absent in the *TagMap* even when *defaultType*
        is present.

    defaultType: ASN.1 type object
        An ASN.1 type object returned for any *TagSet* key not present
        in *presentTypes* (unless that key is in *skipTypes*).
    """

    def __init__(self, presentTypes=None, skipTypes=None, defaultType=None):
        self.__presentTypes = presentTypes or {}
        self.__skipTypes = skipTypes or {}
        self.__defaultType = defaultType

    def __contains__(self, tagSet):
        if tagSet in self.__presentTypes:
            return True
        # A default type covers everything not explicitly skipped.
        return self.__defaultType is not None and tagSet not in self.__skipTypes

    def __getitem__(self, tagSet):
        try:
            return self.__presentTypes[tagSet]
        except KeyError:
            if self.__defaultType is None:
                raise KeyError()
            elif tagSet in self.__skipTypes:
                raise error.PyAsn1Error('Key in negative map')
            else:
                return self.__defaultType

    def __iter__(self):
        return iter(self.__presentTypes)

    def __repr__(self):
        pieces = ['%s object' % self.__class__.__name__]

        if self.__presentTypes:
            pieces.append('present %s' % repr(self.__presentTypes))

        if self.__skipTypes:
            pieces.append('skip %s' % repr(self.__skipTypes))

        if self.__defaultType is not None:
            pieces.append('default %s' % repr(self.__defaultType))

        return '<%s>' % ', '.join(pieces)

    @property
    def presentTypes(self):
        """Return *TagSet* to ASN.1 type map present in callee *TagMap*"""
        return self.__presentTypes

    @property
    def skipTypes(self):
        """Return *TagSet* collection unconditionally absent in callee *TagMap*"""
        return self.__skipTypes

    @property
    def defaultType(self):
        """Return default ASN.1 type being returned for any missing *TagSet*"""
        return self.__defaultType

    # Backward compatibility

    def getPosMap(self):
        return self.presentTypes

    def getNegMap(self):
        return self.skipTypes

    def getDef(self):
        return self.defaultType
|
||||
3305
lib/pyasn1/type/univ.py
Normal file
3305
lib/pyasn1/type/univ.py
Normal file
File diff suppressed because it is too large
Load Diff
189
lib/pyasn1/type/useful.py
Normal file
189
lib/pyasn1/type/useful.py
Normal file
@@ -0,0 +1,189 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import datetime
|
||||
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import char
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import univ
|
||||
|
||||
__all__ = ['ObjectDescriptor', 'GeneralizedTime', 'UTCTime']
|
||||
|
||||
NoValue = univ.NoValue
|
||||
noValue = univ.noValue
|
||||
|
||||
|
||||
class ObjectDescriptor(char.GraphicString):
|
||||
__doc__ = char.GraphicString.__doc__
|
||||
|
||||
#: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
|
||||
tagSet = char.GraphicString.tagSet.tagImplicitly(
|
||||
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7)
|
||||
)
|
||||
|
||||
# Optimization for faster codec lookup
|
||||
typeId = char.GraphicString.getTypeId()
|
||||
|
||||
|
||||
class TimeMixIn(object):
|
||||
|
||||
_yearsDigits = 4
|
||||
_hasSubsecond = False
|
||||
_optionalMinutes = False
|
||||
_shortTZ = False
|
||||
|
||||
class FixedOffset(datetime.tzinfo):
|
||||
"""Fixed offset in minutes east from UTC."""
|
||||
|
||||
# defaulted arguments required
|
||||
# https: // docs.python.org / 2.3 / lib / datetime - tzinfo.html
|
||||
def __init__(self, offset=0, name='UTC'):
|
||||
self.__offset = datetime.timedelta(minutes=offset)
|
||||
self.__name = name
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return self.__offset
|
||||
|
||||
def tzname(self, dt):
|
||||
return self.__name
|
||||
|
||||
def dst(self, dt):
|
||||
return datetime.timedelta(0)
|
||||
|
||||
UTC = FixedOffset()
|
||||
|
||||
@property
|
||||
def asDateTime(self):
|
||||
"""Create :py:class:`datetime.datetime` object from a |ASN.1| object.
|
||||
|
||||
Returns
|
||||
-------
|
||||
:
|
||||
new instance of :py:class:`datetime.datetime` object
|
||||
"""
|
||||
text = str(self)
|
||||
if text.endswith('Z'):
|
||||
tzinfo = TimeMixIn.UTC
|
||||
text = text[:-1]
|
||||
|
||||
elif '-' in text or '+' in text:
|
||||
if '+' in text:
|
||||
text, plusminus, tz = text.partition('+')
|
||||
else:
|
||||
text, plusminus, tz = text.partition('-')
|
||||
|
||||
if self._shortTZ and len(tz) == 2:
|
||||
tz += '00'
|
||||
|
||||
if len(tz) != 4:
|
||||
raise error.PyAsn1Error('malformed time zone offset %s' % tz)
|
||||
|
||||
try:
|
||||
minutes = int(tz[:2]) * 60 + int(tz[2:])
|
||||
if plusminus == '-':
|
||||
minutes *= -1
|
||||
|
||||
except ValueError:
|
||||
raise error.PyAsn1Error('unknown time specification %s' % self)
|
||||
|
||||
tzinfo = TimeMixIn.FixedOffset(minutes, '?')
|
||||
|
||||
else:
|
||||
tzinfo = None
|
||||
|
||||
if '.' in text or ',' in text:
|
||||
if '.' in text:
|
||||
text, _, ms = text.partition('.')
|
||||
else:
|
||||
text, _, ms = text.partition(',')
|
||||
|
||||
try:
|
||||
ms = int(ms) * 1000
|
||||
|
||||
except ValueError:
|
||||
raise error.PyAsn1Error('bad sub-second time specification %s' % self)
|
||||
|
||||
else:
|
||||
ms = 0
|
||||
|
||||
if self._optionalMinutes and len(text) - self._yearsDigits == 6:
|
||||
text += '0000'
|
||||
elif len(text) - self._yearsDigits == 8:
|
||||
text += '00'
|
||||
|
||||
try:
|
||||
dt = datetime.datetime.strptime(text, self._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S')
|
||||
|
||||
except ValueError:
|
||||
raise error.PyAsn1Error('malformed datetime format %s' % self)
|
||||
|
||||
return dt.replace(microsecond=ms, tzinfo=tzinfo)
|
||||
|
||||
@classmethod
|
||||
def fromDateTime(cls, dt):
|
||||
"""Create |ASN.1| object from a :py:class:`datetime.datetime` object.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
dt: :py:class:`datetime.datetime` object
|
||||
The `datetime.datetime` object to initialize the |ASN.1| object
|
||||
from
|
||||
|
||||
Returns
|
||||
-------
|
||||
:
|
||||
new instance of |ASN.1| value
|
||||
"""
|
||||
text = dt.strftime(cls._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S')
|
||||
if cls._hasSubsecond:
|
||||
text += '.%d' % (dt.microsecond // 1000)
|
||||
|
||||
if dt.utcoffset():
|
||||
seconds = dt.utcoffset().seconds
|
||||
if seconds < 0:
|
||||
text += '-'
|
||||
else:
|
||||
text += '+'
|
||||
text += '%.2d%.2d' % (seconds // 3600, seconds % 3600)
|
||||
else:
|
||||
text += 'Z'
|
||||
|
||||
return cls(text)
|
||||
|
||||
|
||||
class GeneralizedTime(char.VisibleString, TimeMixIn):
|
||||
__doc__ = char.VisibleString.__doc__
|
||||
|
||||
#: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
|
||||
tagSet = char.VisibleString.tagSet.tagImplicitly(
|
||||
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24)
|
||||
)
|
||||
|
||||
# Optimization for faster codec lookup
|
||||
typeId = char.VideotexString.getTypeId()
|
||||
|
||||
_yearsDigits = 4
|
||||
_hasSubsecond = True
|
||||
_optionalMinutes = True
|
||||
_shortTZ = True
|
||||
|
||||
|
||||
class UTCTime(char.VisibleString, TimeMixIn):
|
||||
__doc__ = char.VisibleString.__doc__
|
||||
|
||||
#: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
|
||||
tagSet = char.VisibleString.tagSet.tagImplicitly(
|
||||
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23)
|
||||
)
|
||||
|
||||
# Optimization for faster codec lookup
|
||||
typeId = char.VideotexString.getTypeId()
|
||||
|
||||
_yearsDigits = 2
|
||||
_hasSubsecond = False
|
||||
_optionalMinutes = False
|
||||
_shortTZ = False
|
||||
@@ -3,4 +3,4 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.append(os.path.dirname(__file__))
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), 'python3' if sys.version_info[0] >= 3 else 'python2'))
|
||||
|
||||
@@ -2,13 +2,15 @@
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
|
||||
|
||||
from nmb.NetBIOS import NetBIOS
|
||||
from platformcode import logger
|
||||
from smb.SMBConnection import SMBConnection
|
||||
|
||||
GitHub = 'https://github.com/miketeo/pysmb' #buscar aquí de vez en cuando la última versiónde SMB-pysmb, y actualizar en Alfa
|
||||
vesion_actual_pysmb = '1.1.25' #actualizada el 25/11/2018
|
||||
GitHub = 'https://github.com/miketeo/pysmb' # check here from time to time for the latest version of SMB-pysmb
|
||||
vesion_actual_pysmb = '1.2.6' #updated on 26/12/2020
|
||||
|
||||
remote = None
|
||||
|
||||
@@ -65,11 +67,8 @@ def connect(url):
|
||||
def listdir(url):
|
||||
logger.info("Url: %s" % url)
|
||||
remote, share_name, path = connect(url)
|
||||
try:
|
||||
files = [f.filename for f in remote.listPath(share_name, path) if not f.filename in [".", ".."]]
|
||||
return files
|
||||
except Exception, e:
|
||||
raise type(e)(e.message, "")
|
||||
files = [f.filename for f in remote.listPath(share_name, path) if not f.filename in [".", ".."]]
|
||||
return files
|
||||
|
||||
|
||||
def walk(url, topdown=True, onerror=None):
|
||||
@@ -78,7 +77,7 @@ def walk(url, topdown=True, onerror=None):
|
||||
|
||||
try:
|
||||
names = remote.listPath(share_name, path)
|
||||
except Exception, _err:
|
||||
except Exception as _err:
|
||||
if onerror is not None:
|
||||
onerror(_err)
|
||||
return
|
||||
@@ -107,7 +106,7 @@ def get_attributes(url):
|
||||
remote, share_name, path = connect(url)
|
||||
try:
|
||||
return remote.getAttributes(share_name, path)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
|
||||
|
||||
@@ -116,7 +115,7 @@ def mkdir(url):
|
||||
remote, share_name, path = connect(url)
|
||||
try:
|
||||
remote.createDirectory(share_name, path)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
|
||||
|
||||
@@ -130,7 +129,7 @@ def isfile(url):
|
||||
remote, share_name, path = connect(url)
|
||||
try:
|
||||
files = [f.filename for f in remote.listPath(share_name, os.path.dirname(path)) if not f.isDirectory]
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
return os.path.basename(path) in files
|
||||
|
||||
@@ -140,7 +139,7 @@ def isdir(url):
|
||||
remote, share_name, path = connect(url)
|
||||
try:
|
||||
folders = [f.filename for f in remote.listPath(share_name, os.path.dirname(path)) if f.isDirectory]
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
return os.path.basename(path) in folders or path == "/"
|
||||
|
||||
@@ -150,7 +149,7 @@ def exists(url):
|
||||
remote, share_name, path = connect(url)
|
||||
try:
|
||||
files = [f.filename for f in remote.listPath(share_name, os.path.dirname(path))]
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
return os.path.basename(path) in files or path == "/"
|
||||
|
||||
@@ -160,7 +159,7 @@ def remove(url):
|
||||
remote, share_name, path = connect(url)
|
||||
try:
|
||||
remote.deleteFiles(share_name, path)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
|
||||
|
||||
@@ -169,7 +168,7 @@ def rmdir(url):
|
||||
remote, share_name, path = connect(url)
|
||||
try:
|
||||
remote.deleteDirectory(share_name, path)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
|
||||
|
||||
@@ -179,7 +178,7 @@ def rename(url, new_name):
|
||||
_, _, _, new_name, _, _, _ = parse_url(new_name)
|
||||
try:
|
||||
remote.rename(share_name, path, new_name)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
|
||||
|
||||
@@ -225,7 +224,7 @@ class SMBFile(object):
|
||||
if "r+" in self.mode:
|
||||
try:
|
||||
attr = self.remote.getAttributes(self.share, self.path)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
|
||||
self.size = attr.file_size
|
||||
@@ -236,7 +235,7 @@ class SMBFile(object):
|
||||
elif "r" in self.mode:
|
||||
try:
|
||||
attr = self.remote.getAttributes(self.share, self.path)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
|
||||
self.size = attr.file_size
|
||||
@@ -246,7 +245,7 @@ class SMBFile(object):
|
||||
elif "w+" in self.mode:
|
||||
try:
|
||||
self.remote.storeFileFromOffset(self.share, self.path, self.tmpfile(), 0, truncate=True)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
|
||||
self.canread = True
|
||||
@@ -256,7 +255,7 @@ class SMBFile(object):
|
||||
elif "w" in self.mode:
|
||||
try:
|
||||
self.remote.storeFileFromOffset(self.share, self.path, self.tmpfile(), 0, truncate=True)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
|
||||
self.canwrite = True
|
||||
@@ -266,7 +265,7 @@ class SMBFile(object):
|
||||
try:
|
||||
self.remote.storeFileFromOffset(self.share, self.path, self.tmpfile(), 0)
|
||||
attr = self.remote.getAttributes(self.share, self.path)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
|
||||
self.size = attr.file_size
|
||||
@@ -279,7 +278,7 @@ class SMBFile(object):
|
||||
try:
|
||||
self.remote.storeFileFromOffset(self.share, self.path, self.tmpfile(), 0)
|
||||
attr = self.remote.getAttributes(self.share, self.path)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
raise type(e)(e.message, "")
|
||||
|
||||
self.size = attr.file_size
|
||||
@@ -314,7 +313,7 @@ class SMBFile(object):
|
||||
if self.pos > self.size:
|
||||
self.size = self.pos
|
||||
|
||||
def read(self, size=-1L):
|
||||
def read(self, size=-1):
|
||||
if not self.canread:
|
||||
raise IOError("File not open for reading")
|
||||
f = self.tmpfile()
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
import sys
|
||||
|
||||
# http://www.python.org/dev/peps/pep-0396/
|
||||
__version__ = '0.1.9'
|
||||
|
||||
if sys.version_info[:2] < (2, 4):
|
||||
raise RuntimeError('PyASN1 requires Python 2.4 or later')
|
||||
|
||||
@@ -1,842 +0,0 @@
|
||||
# BER decoder
|
||||
from pyasn1 import debug, error
|
||||
from pyasn1.codec.ber import eoo
|
||||
from pyasn1.compat.octets import oct2int, isOctetsType
|
||||
from pyasn1.type import tag, univ, char, useful, tagmap
|
||||
|
||||
|
||||
class AbstractDecoder:
|
||||
protoComponent = None
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,))
|
||||
|
||||
def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,))
|
||||
|
||||
class AbstractSimpleDecoder(AbstractDecoder):
|
||||
tagFormats = (tag.tagFormatSimple,)
|
||||
def _createComponent(self, asn1Spec, tagSet, value=None):
|
||||
if tagSet[0][1] not in self.tagFormats:
|
||||
raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType()))
|
||||
if asn1Spec is None:
|
||||
return self.protoComponent.clone(value, tagSet)
|
||||
elif value is None:
|
||||
return asn1Spec
|
||||
else:
|
||||
return asn1Spec.clone(value)
|
||||
|
||||
class AbstractConstructedDecoder(AbstractDecoder):
|
||||
tagFormats = (tag.tagFormatConstructed,)
|
||||
def _createComponent(self, asn1Spec, tagSet, value=None):
|
||||
if tagSet[0][1] not in self.tagFormats:
|
||||
raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType()))
|
||||
if asn1Spec is None:
|
||||
return self.protoComponent.clone(tagSet)
|
||||
else:
|
||||
return asn1Spec.clone()
|
||||
|
||||
class ExplicitTagDecoder(AbstractSimpleDecoder):
|
||||
protoComponent = univ.Any('')
|
||||
tagFormats = (tag.tagFormatConstructed,)
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
if substrateFun:
|
||||
return substrateFun(
|
||||
self._createComponent(asn1Spec, tagSet, ''),
|
||||
substrate, length
|
||||
)
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
value, _ = decodeFun(head, asn1Spec, tagSet, length)
|
||||
return value, tail
|
||||
|
||||
def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
if substrateFun:
|
||||
return substrateFun(
|
||||
self._createComponent(asn1Spec, tagSet, ''),
|
||||
substrate, length
|
||||
)
|
||||
value, substrate = decodeFun(substrate, asn1Spec, tagSet, length)
|
||||
terminator, substrate = decodeFun(substrate, allowEoo=True)
|
||||
if eoo.endOfOctets.isSameTypeWith(terminator) and \
|
||||
terminator == eoo.endOfOctets:
|
||||
return value, substrate
|
||||
else:
|
||||
raise error.PyAsn1Error('Missing end-of-octets terminator')
|
||||
|
||||
explicitTagDecoder = ExplicitTagDecoder()
|
||||
|
||||
class IntegerDecoder(AbstractSimpleDecoder):
|
||||
protoComponent = univ.Integer(0)
|
||||
precomputedValues = {
|
||||
'\x00': 0,
|
||||
'\x01': 1,
|
||||
'\x02': 2,
|
||||
'\x03': 3,
|
||||
'\x04': 4,
|
||||
'\x05': 5,
|
||||
'\x06': 6,
|
||||
'\x07': 7,
|
||||
'\x08': 8,
|
||||
'\x09': 9,
|
||||
'\xff': -1,
|
||||
'\xfe': -2,
|
||||
'\xfd': -3,
|
||||
'\xfc': -4,
|
||||
'\xfb': -5
|
||||
}
|
||||
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
|
||||
state, decodeFun, substrateFun):
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
if not head:
|
||||
return self._createComponent(asn1Spec, tagSet, 0), tail
|
||||
if head in self.precomputedValues:
|
||||
value = self.precomputedValues[head]
|
||||
else:
|
||||
firstOctet = oct2int(head[0])
|
||||
if firstOctet & 0x80:
|
||||
value = -1
|
||||
else:
|
||||
value = 0
|
||||
for octet in head:
|
||||
value = value << 8 | oct2int(octet)
|
||||
return self._createComponent(asn1Spec, tagSet, value), tail
|
||||
|
||||
class BooleanDecoder(IntegerDecoder):
|
||||
protoComponent = univ.Boolean(0)
|
||||
def _createComponent(self, asn1Spec, tagSet, value=None):
|
||||
return IntegerDecoder._createComponent(self, asn1Spec, tagSet, value and 1 or 0)
|
||||
|
||||
class BitStringDecoder(AbstractSimpleDecoder):
|
||||
protoComponent = univ.BitString(())
|
||||
tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
|
||||
state, decodeFun, substrateFun):
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
if tagSet[0][1] == tag.tagFormatSimple: # XXX what tag to check?
|
||||
if not head:
|
||||
raise error.PyAsn1Error('Empty substrate')
|
||||
trailingBits = oct2int(head[0])
|
||||
if trailingBits > 7:
|
||||
raise error.PyAsn1Error(
|
||||
'Trailing bits overflow %s' % trailingBits
|
||||
)
|
||||
head = head[1:]
|
||||
lsb = p = 0; l = len(head)-1; b = []
|
||||
while p <= l:
|
||||
if p == l:
|
||||
lsb = trailingBits
|
||||
j = 7
|
||||
o = oct2int(head[p])
|
||||
while j >= lsb:
|
||||
b.append((o>>j)&0x01)
|
||||
j = j - 1
|
||||
p = p + 1
|
||||
return self._createComponent(asn1Spec, tagSet, b), tail
|
||||
r = self._createComponent(asn1Spec, tagSet, ())
|
||||
if substrateFun:
|
||||
return substrateFun(r, substrate, length)
|
||||
while head:
|
||||
component, head = decodeFun(head, self.protoComponent)
|
||||
r = r + component
|
||||
return r, tail
|
||||
|
||||
def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
r = self._createComponent(asn1Spec, tagSet, '')
|
||||
if substrateFun:
|
||||
return substrateFun(r, substrate, length)
|
||||
while substrate:
|
||||
component, substrate = decodeFun(substrate, self.protoComponent,
|
||||
allowEoo=True)
|
||||
if eoo.endOfOctets.isSameTypeWith(component) and \
|
||||
component == eoo.endOfOctets:
|
||||
break
|
||||
r = r + component
|
||||
else:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'No EOO seen before substrate ends'
|
||||
)
|
||||
return r, substrate
|
||||
|
||||
class OctetStringDecoder(AbstractSimpleDecoder):
|
||||
protoComponent = univ.OctetString('')
|
||||
tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
|
||||
state, decodeFun, substrateFun):
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
if tagSet[0][1] == tag.tagFormatSimple: # XXX what tag to check?
|
||||
return self._createComponent(asn1Spec, tagSet, head), tail
|
||||
r = self._createComponent(asn1Spec, tagSet, '')
|
||||
if substrateFun:
|
||||
return substrateFun(r, substrate, length)
|
||||
while head:
|
||||
component, head = decodeFun(head, self.protoComponent)
|
||||
r = r + component
|
||||
return r, tail
|
||||
|
||||
def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
r = self._createComponent(asn1Spec, tagSet, '')
|
||||
if substrateFun:
|
||||
return substrateFun(r, substrate, length)
|
||||
while substrate:
|
||||
component, substrate = decodeFun(substrate, self.protoComponent,
|
||||
allowEoo=True)
|
||||
if eoo.endOfOctets.isSameTypeWith(component) and \
|
||||
component == eoo.endOfOctets:
|
||||
break
|
||||
r = r + component
|
||||
else:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'No EOO seen before substrate ends'
|
||||
)
|
||||
return r, substrate
|
||||
|
||||
class NullDecoder(AbstractSimpleDecoder):
|
||||
protoComponent = univ.Null('')
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
r = self._createComponent(asn1Spec, tagSet)
|
||||
if head:
|
||||
raise error.PyAsn1Error('Unexpected %d-octet substrate for Null' % length)
|
||||
return r, tail
|
||||
|
||||
class ObjectIdentifierDecoder(AbstractSimpleDecoder):
|
||||
protoComponent = univ.ObjectIdentifier(())
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
|
||||
state, decodeFun, substrateFun):
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
if not head:
|
||||
raise error.PyAsn1Error('Empty substrate')
|
||||
|
||||
oid = ()
|
||||
index = 0
|
||||
substrateLen = len(head)
|
||||
while index < substrateLen:
|
||||
subId = oct2int(head[index])
|
||||
index += 1
|
||||
if subId < 128:
|
||||
oid = oid + (subId,)
|
||||
elif subId > 128:
|
||||
# Construct subid from a number of octets
|
||||
nextSubId = subId
|
||||
subId = 0
|
||||
while nextSubId >= 128:
|
||||
subId = (subId << 7) + (nextSubId & 0x7F)
|
||||
if index >= substrateLen:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'Short substrate for sub-OID past %s' % (oid,)
|
||||
)
|
||||
nextSubId = oct2int(head[index])
|
||||
index += 1
|
||||
oid = oid + ((subId << 7) + nextSubId,)
|
||||
elif subId == 128:
|
||||
# ASN.1 spec forbids leading zeros (0x80) in OID
|
||||
# encoding, tolerating it opens a vulnerability. See
|
||||
# http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf
|
||||
# page 7
|
||||
raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding')
|
||||
|
||||
# Decode two leading arcs
|
||||
if 0 <= oid[0] <= 39:
|
||||
oid = (0,) + oid
|
||||
elif 40 <= oid[0] <= 79:
|
||||
oid = (1, oid[0]-40) + oid[1:]
|
||||
elif oid[0] >= 80:
|
||||
oid = (2, oid[0]-80) + oid[1:]
|
||||
else:
|
||||
raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0])
|
||||
|
||||
return self._createComponent(asn1Spec, tagSet, oid), tail
|
||||
|
||||
class RealDecoder(AbstractSimpleDecoder):
|
||||
protoComponent = univ.Real()
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
if not head:
|
||||
return self._createComponent(asn1Spec, tagSet, 0.0), tail
|
||||
fo = oct2int(head[0]); head = head[1:]
|
||||
if fo & 0x80: # binary encoding
|
||||
if not head:
|
||||
raise error.PyAsn1Error("Incomplete floating-point value")
|
||||
n = (fo & 0x03) + 1
|
||||
if n == 4:
|
||||
n = oct2int(head[0])
|
||||
head = head[1:]
|
||||
eo, head = head[:n], head[n:]
|
||||
if not eo or not head:
|
||||
raise error.PyAsn1Error('Real exponent screwed')
|
||||
e = oct2int(eo[0]) & 0x80 and -1 or 0
|
||||
while eo: # exponent
|
||||
e <<= 8
|
||||
e |= oct2int(eo[0])
|
||||
eo = eo[1:]
|
||||
b = fo >> 4 & 0x03 # base bits
|
||||
if b > 2:
|
||||
raise error.PyAsn1Error('Illegal Real base')
|
||||
if b == 1: # encbase = 8
|
||||
e *= 3
|
||||
elif b == 2: # encbase = 16
|
||||
e *= 4
|
||||
p = 0
|
||||
while head: # value
|
||||
p <<= 8
|
||||
p |= oct2int(head[0])
|
||||
head = head[1:]
|
||||
if fo & 0x40: # sign bit
|
||||
p = -p
|
||||
sf = fo >> 2 & 0x03 # scale bits
|
||||
p *= 2**sf
|
||||
value = (p, 2, e)
|
||||
elif fo & 0x40: # infinite value
|
||||
value = fo & 0x01 and '-inf' or 'inf'
|
||||
elif fo & 0xc0 == 0: # character encoding
|
||||
if not head:
|
||||
raise error.PyAsn1Error("Incomplete floating-point value")
|
||||
try:
|
||||
if fo & 0x3 == 0x1: # NR1
|
||||
value = (int(head), 10, 0)
|
||||
elif fo & 0x3 == 0x2: # NR2
|
||||
value = float(head)
|
||||
elif fo & 0x3 == 0x3: # NR3
|
||||
value = float(head)
|
||||
else:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'Unknown NR (tag %s)' % fo
|
||||
)
|
||||
except ValueError:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'Bad character Real syntax'
|
||||
)
|
||||
else:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'Unknown encoding (tag %s)' % fo
|
||||
)
|
||||
return self._createComponent(asn1Spec, tagSet, value), tail
|
||||
|
||||
class SequenceDecoder(AbstractConstructedDecoder):
|
||||
protoComponent = univ.Sequence()
|
||||
def _getComponentTagMap(self, r, idx):
|
||||
try:
|
||||
return r.getComponentTagMapNearPosition(idx)
|
||||
except error.PyAsn1Error:
|
||||
return
|
||||
|
||||
def _getComponentPositionByType(self, r, t, idx):
|
||||
return r.getComponentPositionNearType(t, idx)
|
||||
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
r = self._createComponent(asn1Spec, tagSet)
|
||||
idx = 0
|
||||
if substrateFun:
|
||||
return substrateFun(r, substrate, length)
|
||||
while head:
|
||||
asn1Spec = self._getComponentTagMap(r, idx)
|
||||
component, head = decodeFun(head, asn1Spec)
|
||||
idx = self._getComponentPositionByType(
|
||||
r, component.getEffectiveTagSet(), idx
|
||||
)
|
||||
r.setComponentByPosition(idx, component, asn1Spec is None)
|
||||
idx = idx + 1
|
||||
r.setDefaultComponents()
|
||||
r.verifySizeSpec()
|
||||
return r, tail
|
||||
|
||||
def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
r = self._createComponent(asn1Spec, tagSet)
|
||||
if substrateFun:
|
||||
return substrateFun(r, substrate, length)
|
||||
idx = 0
|
||||
while substrate:
|
||||
asn1Spec = self._getComponentTagMap(r, idx)
|
||||
component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
|
||||
if eoo.endOfOctets.isSameTypeWith(component) and \
|
||||
component == eoo.endOfOctets:
|
||||
break
|
||||
idx = self._getComponentPositionByType(
|
||||
r, component.getEffectiveTagSet(), idx
|
||||
)
|
||||
r.setComponentByPosition(idx, component, asn1Spec is None)
|
||||
idx = idx + 1
|
||||
else:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'No EOO seen before substrate ends'
|
||||
)
|
||||
r.setDefaultComponents()
|
||||
r.verifySizeSpec()
|
||||
return r, substrate
|
||||
|
||||
class SequenceOfDecoder(AbstractConstructedDecoder):
|
||||
protoComponent = univ.SequenceOf()
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
r = self._createComponent(asn1Spec, tagSet)
|
||||
if substrateFun:
|
||||
return substrateFun(r, substrate, length)
|
||||
asn1Spec = r.getComponentType()
|
||||
idx = 0
|
||||
while head:
|
||||
component, head = decodeFun(head, asn1Spec)
|
||||
r.setComponentByPosition(idx, component, asn1Spec is None)
|
||||
idx = idx + 1
|
||||
r.verifySizeSpec()
|
||||
return r, tail
|
||||
|
||||
def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
r = self._createComponent(asn1Spec, tagSet)
|
||||
if substrateFun:
|
||||
return substrateFun(r, substrate, length)
|
||||
asn1Spec = r.getComponentType()
|
||||
idx = 0
|
||||
while substrate:
|
||||
component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
|
||||
if eoo.endOfOctets.isSameTypeWith(component) and \
|
||||
component == eoo.endOfOctets:
|
||||
break
|
||||
r.setComponentByPosition(idx, component, asn1Spec is None)
|
||||
idx = idx + 1
|
||||
else:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'No EOO seen before substrate ends'
|
||||
)
|
||||
r.verifySizeSpec()
|
||||
return r, substrate
|
||||
|
||||
class SetDecoder(SequenceDecoder):
|
||||
protoComponent = univ.Set()
|
||||
def _getComponentTagMap(self, r, idx):
|
||||
return r.getComponentTagMap()
|
||||
|
||||
def _getComponentPositionByType(self, r, t, idx):
|
||||
nextIdx = r.getComponentPositionByType(t)
|
||||
if nextIdx is None:
|
||||
return idx
|
||||
else:
|
||||
return nextIdx
|
||||
|
||||
class SetOfDecoder(SequenceOfDecoder):
|
||||
protoComponent = univ.SetOf()
|
||||
|
||||
class ChoiceDecoder(AbstractConstructedDecoder):
|
||||
protoComponent = univ.Choice()
|
||||
tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
r = self._createComponent(asn1Spec, tagSet)
|
||||
if substrateFun:
|
||||
return substrateFun(r, substrate, length)
|
||||
if r.getTagSet() == tagSet: # explicitly tagged Choice
|
||||
component, head = decodeFun(
|
||||
head, r.getComponentTagMap()
|
||||
)
|
||||
else:
|
||||
component, head = decodeFun(
|
||||
head, r.getComponentTagMap(), tagSet, length, state
|
||||
)
|
||||
if isinstance(component, univ.Choice):
|
||||
effectiveTagSet = component.getEffectiveTagSet()
|
||||
else:
|
||||
effectiveTagSet = component.getTagSet()
|
||||
r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None)
|
||||
return r, tail
|
||||
|
||||
def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
r = self._createComponent(asn1Spec, tagSet)
|
||||
if substrateFun:
|
||||
return substrateFun(r, substrate, length)
|
||||
if r.getTagSet() == tagSet: # explicitly tagged Choice
|
||||
component, substrate = decodeFun(substrate, r.getComponentTagMap())
|
||||
# eat up EOO marker
|
||||
eooMarker, substrate = decodeFun(substrate, allowEoo=True)
|
||||
if not eoo.endOfOctets.isSameTypeWith(eooMarker) or \
|
||||
eooMarker != eoo.endOfOctets:
|
||||
raise error.PyAsn1Error('No EOO seen before substrate ends')
|
||||
else:
|
||||
component, substrate= decodeFun(
|
||||
substrate, r.getComponentTagMap(), tagSet, length, state
|
||||
)
|
||||
if isinstance(component, univ.Choice):
|
||||
effectiveTagSet = component.getEffectiveTagSet()
|
||||
else:
|
||||
effectiveTagSet = component.getTagSet()
|
||||
r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None)
|
||||
return r, substrate
|
||||
|
||||
class AnyDecoder(AbstractSimpleDecoder):
|
||||
protoComponent = univ.Any()
|
||||
tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
if asn1Spec is None or \
|
||||
asn1Spec is not None and tagSet != asn1Spec.getTagSet():
|
||||
# untagged Any container, recover inner header substrate
|
||||
length = length + len(fullSubstrate) - len(substrate)
|
||||
substrate = fullSubstrate
|
||||
if substrateFun:
|
||||
return substrateFun(self._createComponent(asn1Spec, tagSet),
|
||||
substrate, length)
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
return self._createComponent(asn1Spec, tagSet, value=head), tail
|
||||
|
||||
def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
|
||||
length, state, decodeFun, substrateFun):
|
||||
if asn1Spec is not None and tagSet == asn1Spec.getTagSet():
|
||||
# tagged Any type -- consume header substrate
|
||||
header = ''
|
||||
else:
|
||||
# untagged Any, recover header substrate
|
||||
header = fullSubstrate[:-len(substrate)]
|
||||
|
||||
r = self._createComponent(asn1Spec, tagSet, header)
|
||||
|
||||
# Any components do not inherit initial tag
|
||||
asn1Spec = self.protoComponent
|
||||
|
||||
if substrateFun:
|
||||
return substrateFun(r, substrate, length)
|
||||
while substrate:
|
||||
component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
|
||||
if eoo.endOfOctets.isSameTypeWith(component) and \
|
||||
component == eoo.endOfOctets:
|
||||
break
|
||||
r = r + component
|
||||
else:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'No EOO seen before substrate ends'
|
||||
)
|
||||
return r, substrate
|
||||
|
||||
# character string types
|
||||
class UTF8StringDecoder(OctetStringDecoder):
|
||||
protoComponent = char.UTF8String()
|
||||
class NumericStringDecoder(OctetStringDecoder):
|
||||
protoComponent = char.NumericString()
|
||||
class PrintableStringDecoder(OctetStringDecoder):
|
||||
protoComponent = char.PrintableString()
|
||||
class TeletexStringDecoder(OctetStringDecoder):
|
||||
protoComponent = char.TeletexString()
|
||||
class VideotexStringDecoder(OctetStringDecoder):
|
||||
protoComponent = char.VideotexString()
|
||||
class IA5StringDecoder(OctetStringDecoder):
|
||||
protoComponent = char.IA5String()
|
||||
class GraphicStringDecoder(OctetStringDecoder):
|
||||
protoComponent = char.GraphicString()
|
||||
class VisibleStringDecoder(OctetStringDecoder):
|
||||
protoComponent = char.VisibleString()
|
||||
class GeneralStringDecoder(OctetStringDecoder):
|
||||
protoComponent = char.GeneralString()
|
||||
class UniversalStringDecoder(OctetStringDecoder):
|
||||
protoComponent = char.UniversalString()
|
||||
class BMPStringDecoder(OctetStringDecoder):
|
||||
protoComponent = char.BMPString()
|
||||
|
||||
# "useful" types
|
||||
class ObjectDescriptorDecoder(OctetStringDecoder):
|
||||
protoComponent = useful.ObjectDescriptor()
|
||||
class GeneralizedTimeDecoder(OctetStringDecoder):
|
||||
protoComponent = useful.GeneralizedTime()
|
||||
class UTCTimeDecoder(OctetStringDecoder):
|
||||
protoComponent = useful.UTCTime()
|
||||
|
||||
tagMap = {
|
||||
univ.Integer.tagSet: IntegerDecoder(),
|
||||
univ.Boolean.tagSet: BooleanDecoder(),
|
||||
univ.BitString.tagSet: BitStringDecoder(),
|
||||
univ.OctetString.tagSet: OctetStringDecoder(),
|
||||
univ.Null.tagSet: NullDecoder(),
|
||||
univ.ObjectIdentifier.tagSet: ObjectIdentifierDecoder(),
|
||||
univ.Enumerated.tagSet: IntegerDecoder(),
|
||||
univ.Real.tagSet: RealDecoder(),
|
||||
univ.Sequence.tagSet: SequenceDecoder(), # conflicts with SequenceOf
|
||||
univ.Set.tagSet: SetDecoder(), # conflicts with SetOf
|
||||
univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any
|
||||
# character string types
|
||||
char.UTF8String.tagSet: UTF8StringDecoder(),
|
||||
char.NumericString.tagSet: NumericStringDecoder(),
|
||||
char.PrintableString.tagSet: PrintableStringDecoder(),
|
||||
char.TeletexString.tagSet: TeletexStringDecoder(),
|
||||
char.VideotexString.tagSet: VideotexStringDecoder(),
|
||||
char.IA5String.tagSet: IA5StringDecoder(),
|
||||
char.GraphicString.tagSet: GraphicStringDecoder(),
|
||||
char.VisibleString.tagSet: VisibleStringDecoder(),
|
||||
char.GeneralString.tagSet: GeneralStringDecoder(),
|
||||
char.UniversalString.tagSet: UniversalStringDecoder(),
|
||||
char.BMPString.tagSet: BMPStringDecoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(),
|
||||
useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(),
|
||||
useful.UTCTime.tagSet: UTCTimeDecoder()
|
||||
}
|
||||
|
||||
# Type-to-codec map for ambiguous ASN.1 types
|
||||
typeMap = {
|
||||
univ.Set.typeId: SetDecoder(),
|
||||
univ.SetOf.typeId: SetOfDecoder(),
|
||||
univ.Sequence.typeId: SequenceDecoder(),
|
||||
univ.SequenceOf.typeId: SequenceOfDecoder(),
|
||||
univ.Choice.typeId: ChoiceDecoder(),
|
||||
univ.Any.typeId: AnyDecoder()
|
||||
}
|
||||
|
||||
( stDecodeTag, stDecodeLength, stGetValueDecoder, stGetValueDecoderByAsn1Spec,
|
||||
stGetValueDecoderByTag, stTryAsExplicitTag, stDecodeValue,
|
||||
stDumpRawValue, stErrorCondition, stStop ) = [x for x in range(10)]
|
||||
|
||||
class Decoder:
|
||||
defaultErrorState = stErrorCondition
|
||||
# defaultErrorState = stDumpRawValue
|
||||
defaultRawDecoder = AnyDecoder()
|
||||
supportIndefLength = True
|
||||
def __init__(self, tagMap, typeMap={}):
|
||||
self.__tagMap = tagMap
|
||||
self.__typeMap = typeMap
|
||||
# Tag & TagSet objects caches
|
||||
self.__tagCache = {}
|
||||
self.__tagSetCache = {}
|
||||
|
||||
def __call__(self, substrate, asn1Spec=None, tagSet=None,
|
||||
length=None, state=stDecodeTag, recursiveFlag=1,
|
||||
substrateFun=None, allowEoo=False):
|
||||
if debug.logger & debug.flagDecoder:
|
||||
debug.logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate)))
|
||||
fullSubstrate = substrate
|
||||
while state != stStop:
|
||||
if state == stDecodeTag:
|
||||
if not substrate:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'Short octet stream on tag decoding'
|
||||
)
|
||||
if not isOctetsType(substrate) and \
|
||||
not isinstance(substrate, univ.OctetString):
|
||||
raise error.PyAsn1Error('Bad octet stream type')
|
||||
# Decode tag
|
||||
firstOctet = substrate[0]
|
||||
substrate = substrate[1:]
|
||||
if firstOctet in self.__tagCache:
|
||||
lastTag = self.__tagCache[firstOctet]
|
||||
else:
|
||||
t = oct2int(firstOctet)
|
||||
# Look for end-of-octets sentinel
|
||||
if t == 0:
|
||||
if substrate and oct2int(substrate[0]) == 0:
|
||||
if allowEoo and self.supportIndefLength:
|
||||
debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets sentinel found')
|
||||
value, substrate = eoo.endOfOctets, substrate[1:]
|
||||
state = stStop
|
||||
continue
|
||||
else:
|
||||
raise error.PyAsn1Error('Unexpected end-of-contents sentinel')
|
||||
else:
|
||||
raise error.PyAsn1Error('Zero tag encountered')
|
||||
tagClass = t&0xC0
|
||||
tagFormat = t&0x20
|
||||
tagId = t&0x1F
|
||||
if tagId == 0x1F:
|
||||
tagId = 0
|
||||
while 1:
|
||||
if not substrate:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'Short octet stream on long tag decoding'
|
||||
)
|
||||
t = oct2int(substrate[0])
|
||||
tagId = tagId << 7 | (t&0x7F)
|
||||
substrate = substrate[1:]
|
||||
if not t&0x80:
|
||||
break
|
||||
lastTag = tag.Tag(
|
||||
tagClass=tagClass, tagFormat=tagFormat, tagId=tagId
|
||||
)
|
||||
if tagId < 31:
|
||||
# cache short tags
|
||||
self.__tagCache[firstOctet] = lastTag
|
||||
if tagSet is None:
|
||||
if firstOctet in self.__tagSetCache:
|
||||
tagSet = self.__tagSetCache[firstOctet]
|
||||
else:
|
||||
# base tag not recovered
|
||||
tagSet = tag.TagSet((), lastTag)
|
||||
if firstOctet in self.__tagCache:
|
||||
self.__tagSetCache[firstOctet] = tagSet
|
||||
else:
|
||||
tagSet = lastTag + tagSet
|
||||
state = stDecodeLength
|
||||
debug.logger and debug.logger & debug.flagDecoder and debug.logger('tag decoded into %s, decoding length' % tagSet)
|
||||
if state == stDecodeLength:
|
||||
# Decode length
|
||||
if not substrate:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'Short octet stream on length decoding'
|
||||
)
|
||||
firstOctet = oct2int(substrate[0])
|
||||
if firstOctet == 128:
|
||||
size = 1
|
||||
length = -1
|
||||
elif firstOctet < 128:
|
||||
length, size = firstOctet, 1
|
||||
else:
|
||||
size = firstOctet & 0x7F
|
||||
# encoded in size bytes
|
||||
length = 0
|
||||
lengthString = substrate[1:size+1]
|
||||
# missing check on maximum size, which shouldn't be a
|
||||
# problem, we can handle more than is possible
|
||||
if len(lengthString) != size:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'%s<%s at %s' %
|
||||
(size, len(lengthString), tagSet)
|
||||
)
|
||||
for char in lengthString:
|
||||
length = (length << 8) | oct2int(char)
|
||||
size = size + 1
|
||||
substrate = substrate[size:]
|
||||
if length != -1 and len(substrate) < length:
|
||||
raise error.SubstrateUnderrunError(
|
||||
'%d-octet short' % (length - len(substrate))
|
||||
)
|
||||
if length == -1 and not self.supportIndefLength:
|
||||
error.PyAsn1Error('Indefinite length encoding not supported by this codec')
|
||||
state = stGetValueDecoder
|
||||
debug.logger and debug.logger & debug.flagDecoder and debug.logger('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])))
|
||||
if state == stGetValueDecoder:
|
||||
if asn1Spec is None:
|
||||
state = stGetValueDecoderByTag
|
||||
else:
|
||||
state = stGetValueDecoderByAsn1Spec
|
||||
#
|
||||
# There're two ways of creating subtypes in ASN.1 what influences
|
||||
# decoder operation. These methods are:
|
||||
# 1) Either base types used in or no IMPLICIT tagging has been
|
||||
# applied on subtyping.
|
||||
# 2) Subtype syntax drops base type information (by means of
|
||||
# IMPLICIT tagging.
|
||||
# The first case allows for complete tag recovery from substrate
|
||||
# while the second one requires original ASN.1 type spec for
|
||||
# decoding.
|
||||
#
|
||||
# In either case a set of tags (tagSet) is coming from substrate
|
||||
# in an incremental, tag-by-tag fashion (this is the case of
|
||||
# EXPLICIT tag which is most basic). Outermost tag comes first
|
||||
# from the wire.
|
||||
#
|
||||
if state == stGetValueDecoderByTag:
|
||||
if tagSet in self.__tagMap:
|
||||
concreteDecoder = self.__tagMap[tagSet]
|
||||
else:
|
||||
concreteDecoder = None
|
||||
if concreteDecoder:
|
||||
state = stDecodeValue
|
||||
else:
|
||||
_k = tagSet[:1]
|
||||
if _k in self.__tagMap:
|
||||
concreteDecoder = self.__tagMap[_k]
|
||||
else:
|
||||
concreteDecoder = None
|
||||
if concreteDecoder:
|
||||
state = stDecodeValue
|
||||
else:
|
||||
state = stTryAsExplicitTag
|
||||
if debug.logger and debug.logger & debug.flagDecoder:
|
||||
debug.logger('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as explicit tag'))
|
||||
debug.scope.push(concreteDecoder is None and '?' or concreteDecoder.protoComponent.__class__.__name__)
|
||||
if state == stGetValueDecoderByAsn1Spec:
|
||||
if isinstance(asn1Spec, (dict, tagmap.TagMap)):
|
||||
if tagSet in asn1Spec:
|
||||
__chosenSpec = asn1Spec[tagSet]
|
||||
else:
|
||||
__chosenSpec = None
|
||||
if debug.logger and debug.logger & debug.flagDecoder:
|
||||
debug.logger('candidate ASN.1 spec is a map of:')
|
||||
for t, v in asn1Spec.getPosMap().items():
|
||||
debug.logger(' %s -> %s' % (t, v.__class__.__name__))
|
||||
if asn1Spec.getNegMap():
|
||||
debug.logger('but neither of: ')
|
||||
for t, v in asn1Spec.getNegMap().items():
|
||||
debug.logger(' %s -> %s' % (t, v.__class__.__name__))
|
||||
debug.logger('new candidate ASN.1 spec is %s, chosen by %s' % (__chosenSpec is None and '<none>' or __chosenSpec.prettyPrintType(), tagSet))
|
||||
else:
|
||||
__chosenSpec = asn1Spec
|
||||
debug.logger and debug.logger & debug.flagDecoder and debug.logger('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__)
|
||||
if __chosenSpec is not None and (
|
||||
tagSet == __chosenSpec.getTagSet() or \
|
||||
tagSet in __chosenSpec.getTagMap()
|
||||
):
|
||||
# use base type for codec lookup to recover untagged types
|
||||
baseTagSet = __chosenSpec.baseTagSet
|
||||
if __chosenSpec.typeId is not None and \
|
||||
__chosenSpec.typeId in self.__typeMap:
|
||||
# ambiguous type
|
||||
concreteDecoder = self.__typeMap[__chosenSpec.typeId]
|
||||
debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen for an ambiguous type by type ID %s' % (__chosenSpec.typeId,))
|
||||
elif baseTagSet in self.__tagMap:
|
||||
# base type or tagged subtype
|
||||
concreteDecoder = self.__tagMap[baseTagSet]
|
||||
debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %s' % (baseTagSet,))
|
||||
else:
|
||||
concreteDecoder = None
|
||||
if concreteDecoder:
|
||||
asn1Spec = __chosenSpec
|
||||
state = stDecodeValue
|
||||
else:
|
||||
state = stTryAsExplicitTag
|
||||
else:
|
||||
concreteDecoder = None
|
||||
state = stTryAsExplicitTag
|
||||
if debug.logger and debug.logger & debug.flagDecoder:
|
||||
debug.logger('codec %s chosen by ASN.1 spec, decoding %s' % (state == stDecodeValue and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as explicit tag'))
|
||||
debug.scope.push(__chosenSpec is None and '?' or __chosenSpec.__class__.__name__)
|
||||
if state == stTryAsExplicitTag:
|
||||
if tagSet and \
|
||||
tagSet[0][1] == tag.tagFormatConstructed and \
|
||||
tagSet[0][0] != tag.tagClassUniversal:
|
||||
# Assume explicit tagging
|
||||
concreteDecoder = explicitTagDecoder
|
||||
state = stDecodeValue
|
||||
else:
|
||||
concreteDecoder = None
|
||||
state = self.defaultErrorState
|
||||
debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as failure'))
|
||||
if state == stDumpRawValue:
|
||||
concreteDecoder = self.defaultRawDecoder
|
||||
debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__)
|
||||
state = stDecodeValue
|
||||
if state == stDecodeValue:
|
||||
if recursiveFlag == 0 and not substrateFun: # legacy
|
||||
substrateFun = lambda a,b,c: (a,b[:c])
|
||||
if length == -1: # indef length
|
||||
value, substrate = concreteDecoder.indefLenValueDecoder(
|
||||
fullSubstrate, substrate, asn1Spec, tagSet, length,
|
||||
stGetValueDecoder, self, substrateFun
|
||||
)
|
||||
else:
|
||||
value, substrate = concreteDecoder.valueDecoder(
|
||||
fullSubstrate, substrate, asn1Spec, tagSet, length,
|
||||
stGetValueDecoder, self, substrateFun
|
||||
)
|
||||
state = stStop
|
||||
debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, value.prettyPrint(), substrate and debug.hexdump(substrate) or '<none>'))
|
||||
if state == stErrorCondition:
|
||||
raise error.PyAsn1Error(
|
||||
'%s not in asn1Spec: %s' % (tagSet, asn1Spec)
|
||||
)
|
||||
if debug.logger and debug.logger & debug.flagDecoder:
|
||||
debug.scope.pop()
|
||||
debug.logger('decoder left scope %s, call completed' % debug.scope)
|
||||
return value, substrate
|
||||
|
||||
decode = Decoder(tagMap, typeMap)
|
||||
|
||||
# XXX
|
||||
# non-recursive decoding; return position rather than substrate
|
||||
@@ -1,434 +0,0 @@
|
||||
# BER encoder
|
||||
from pyasn1 import debug, error
|
||||
from pyasn1.codec.ber import eoo
|
||||
from pyasn1.compat.octets import int2oct, oct2int, ints2octs, null, str2octs
|
||||
from pyasn1.type import base, tag, univ, char, useful
|
||||
|
||||
|
||||
class Error(Exception): pass
|
||||
|
||||
class AbstractItemEncoder:
|
||||
supportIndefLenMode = 1
|
||||
def encodeTag(self, t, isConstructed):
|
||||
tagClass, tagFormat, tagId = t.asTuple() # this is a hotspot
|
||||
v = tagClass | tagFormat
|
||||
if isConstructed:
|
||||
v = v|tag.tagFormatConstructed
|
||||
if tagId < 31:
|
||||
return int2oct(v|tagId)
|
||||
else:
|
||||
s = int2oct(tagId&0x7f)
|
||||
tagId = tagId >> 7
|
||||
while tagId:
|
||||
s = int2oct(0x80|(tagId&0x7f)) + s
|
||||
tagId = tagId >> 7
|
||||
return int2oct(v|0x1F) + s
|
||||
|
||||
def encodeLength(self, length, defMode):
|
||||
if not defMode and self.supportIndefLenMode:
|
||||
return int2oct(0x80)
|
||||
if length < 0x80:
|
||||
return int2oct(length)
|
||||
else:
|
||||
substrate = null
|
||||
while length:
|
||||
substrate = int2oct(length&0xff) + substrate
|
||||
length = length >> 8
|
||||
substrateLen = len(substrate)
|
||||
if substrateLen > 126:
|
||||
raise Error('Length octets overflow (%d)' % substrateLen)
|
||||
return int2oct(0x80 | substrateLen) + substrate
|
||||
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
raise Error('Not implemented')
|
||||
|
||||
def _encodeEndOfOctets(self, encodeFun, defMode):
|
||||
if defMode or not self.supportIndefLenMode:
|
||||
return null
|
||||
else:
|
||||
return encodeFun(eoo.endOfOctets, defMode)
|
||||
|
||||
def encode(self, encodeFun, value, defMode, maxChunkSize):
|
||||
substrate, isConstructed = self.encodeValue(
|
||||
encodeFun, value, defMode, maxChunkSize
|
||||
)
|
||||
tagSet = value.getTagSet()
|
||||
if tagSet:
|
||||
if not isConstructed: # primitive form implies definite mode
|
||||
defMode = 1
|
||||
return self.encodeTag(
|
||||
tagSet[-1], isConstructed
|
||||
) + self.encodeLength(
|
||||
len(substrate), defMode
|
||||
) + substrate + self._encodeEndOfOctets(encodeFun, defMode)
|
||||
else:
|
||||
return substrate # untagged value
|
||||
|
||||
class EndOfOctetsEncoder(AbstractItemEncoder):
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
return null, 0
|
||||
|
||||
class ExplicitlyTaggedItemEncoder(AbstractItemEncoder):
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
if isinstance(value, base.AbstractConstructedAsn1Item):
|
||||
value = value.clone(tagSet=value.getTagSet()[:-1],
|
||||
cloneValueFlag=1)
|
||||
else:
|
||||
value = value.clone(tagSet=value.getTagSet()[:-1])
|
||||
return encodeFun(value, defMode, maxChunkSize), 1
|
||||
|
||||
explicitlyTaggedItemEncoder = ExplicitlyTaggedItemEncoder()
|
||||
|
||||
class BooleanEncoder(AbstractItemEncoder):
|
||||
supportIndefLenMode = 0
|
||||
_true = ints2octs((1,))
|
||||
_false = ints2octs((0,))
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
return value and self._true or self._false, 0
|
||||
|
||||
class IntegerEncoder(AbstractItemEncoder):
|
||||
supportIndefLenMode = 0
|
||||
supportCompactZero = False
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
if value == 0: # shortcut for zero value
|
||||
if self.supportCompactZero:
|
||||
# this seems to be a correct way for encoding zeros
|
||||
return null, 0
|
||||
else:
|
||||
# this seems to be a widespread way for encoding zeros
|
||||
return ints2octs((0,)), 0
|
||||
octets = []
|
||||
value = int(value) # to save on ops on asn1 type
|
||||
while 1:
|
||||
octets.insert(0, value & 0xff)
|
||||
if value == 0 or value == -1:
|
||||
break
|
||||
value = value >> 8
|
||||
if value == 0 and octets[0] & 0x80:
|
||||
octets.insert(0, 0)
|
||||
while len(octets) > 1 and \
|
||||
(octets[0] == 0 and octets[1] & 0x80 == 0 or \
|
||||
octets[0] == 0xff and octets[1] & 0x80 != 0):
|
||||
del octets[0]
|
||||
return ints2octs(octets), 0
|
||||
|
||||
class BitStringEncoder(AbstractItemEncoder):
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
if not maxChunkSize or len(value) <= maxChunkSize*8:
|
||||
out_len = (len(value) + 7) // 8
|
||||
out_list = out_len * [0]
|
||||
j = 7
|
||||
i = -1
|
||||
for val in value:
|
||||
j += 1
|
||||
if j == 8:
|
||||
i += 1
|
||||
j = 0
|
||||
out_list[i] = out_list[i] | val << (7-j)
|
||||
return int2oct(7-j) + ints2octs(out_list), 0
|
||||
else:
|
||||
pos = 0; substrate = null
|
||||
while 1:
|
||||
# count in octets
|
||||
v = value.clone(value[pos*8:pos*8+maxChunkSize*8])
|
||||
if not v:
|
||||
break
|
||||
substrate = substrate + encodeFun(v, defMode, maxChunkSize)
|
||||
pos = pos + maxChunkSize
|
||||
return substrate, 1
|
||||
|
||||
class OctetStringEncoder(AbstractItemEncoder):
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
if not maxChunkSize or len(value) <= maxChunkSize:
|
||||
return value.asOctets(), 0
|
||||
else:
|
||||
pos = 0; substrate = null
|
||||
while 1:
|
||||
v = value.clone(value[pos:pos+maxChunkSize])
|
||||
if not v:
|
||||
break
|
||||
substrate = substrate + encodeFun(v, defMode, maxChunkSize)
|
||||
pos = pos + maxChunkSize
|
||||
return substrate, 1
|
||||
|
||||
class NullEncoder(AbstractItemEncoder):
|
||||
supportIndefLenMode = 0
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
return null, 0
|
||||
|
||||
class ObjectIdentifierEncoder(AbstractItemEncoder):
|
||||
supportIndefLenMode = 0
|
||||
precomputedValues = {
|
||||
(1, 3, 6, 1, 2): (43, 6, 1, 2),
|
||||
(1, 3, 6, 1, 4): (43, 6, 1, 4)
|
||||
}
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
oid = value.asTuple()
|
||||
if oid[:5] in self.precomputedValues:
|
||||
octets = self.precomputedValues[oid[:5]]
|
||||
oid = oid[5:]
|
||||
else:
|
||||
if len(oid) < 2:
|
||||
raise error.PyAsn1Error('Short OID %s' % (value,))
|
||||
|
||||
octets = ()
|
||||
|
||||
# Build the first twos
|
||||
if oid[0] == 0 and 0 <= oid[1] <= 39:
|
||||
oid = (oid[1],) + oid[2:]
|
||||
elif oid[0] == 1 and 0 <= oid[1] <= 39:
|
||||
oid = (oid[1] + 40,) + oid[2:]
|
||||
elif oid[0] == 2:
|
||||
oid = (oid[1] + 80,) + oid[2:]
|
||||
else:
|
||||
raise error.PyAsn1Error(
|
||||
'Impossible initial arcs %s at %s' % (oid[:2], value)
|
||||
)
|
||||
|
||||
# Cycle through subIds
|
||||
for subId in oid:
|
||||
if subId > -1 and subId < 128:
|
||||
# Optimize for the common case
|
||||
octets = octets + (subId & 0x7f,)
|
||||
elif subId < 0:
|
||||
raise error.PyAsn1Error(
|
||||
'Negative OID arc %s at %s' % (subId, value)
|
||||
)
|
||||
else:
|
||||
# Pack large Sub-Object IDs
|
||||
res = (subId & 0x7f,)
|
||||
subId = subId >> 7
|
||||
while subId > 0:
|
||||
res = (0x80 | (subId & 0x7f),) + res
|
||||
subId = subId >> 7
|
||||
# Add packed Sub-Object ID to resulted Object ID
|
||||
octets += res
|
||||
|
||||
return ints2octs(octets), 0
|
||||
|
||||
class RealEncoder(AbstractItemEncoder):
|
||||
supportIndefLenMode = 0
|
||||
binEncBase = 2 # set to None to choose encoding base automatically
|
||||
def _dropFloatingPoint(self, m, encbase, e):
|
||||
ms, es = 1, 1
|
||||
if m < 0:
|
||||
ms = -1 # mantissa sign
|
||||
if e < 0:
|
||||
es = -1 # exponenta sign
|
||||
m *= ms
|
||||
if encbase == 8:
|
||||
m = m*2**(abs(e) % 3 * es)
|
||||
e = abs(e) // 3 * es
|
||||
elif encbase == 16:
|
||||
m = m*2**(abs(e) % 4 * es)
|
||||
e = abs(e) // 4 * es
|
||||
|
||||
while 1:
|
||||
if int(m) != m:
|
||||
m *= encbase
|
||||
e -= 1
|
||||
continue
|
||||
break
|
||||
return ms, int(m), encbase, e
|
||||
|
||||
def _chooseEncBase(self, value):
|
||||
m, b, e = value
|
||||
base = [2, 8, 16]
|
||||
if value.binEncBase in base:
|
||||
return self._dropFloatingPoint(m, value.binEncBase, e)
|
||||
elif self.binEncBase in base:
|
||||
return self._dropFloatingPoint(m, self.binEncBase, e)
|
||||
# auto choosing base 2/8/16
|
||||
mantissa = [m, m, m]
|
||||
exponenta = [e, e, e]
|
||||
encbase = 2
|
||||
e = float('inf')
|
||||
for i in range(3):
|
||||
sign, mantissa[i], base[i], exponenta[i] = \
|
||||
self._dropFloatingPoint(mantissa[i], base[i], exponenta[i])
|
||||
if abs(exponenta[i]) < abs(e) or \
|
||||
(abs(exponenta[i]) == abs(e) and mantissa[i] < m):
|
||||
e = exponenta[i]
|
||||
m = int(mantissa[i])
|
||||
encbase = base[i]
|
||||
return sign, m, encbase, e
|
||||
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
if value.isPlusInfinity():
|
||||
return int2oct(0x40), 0
|
||||
if value.isMinusInfinity():
|
||||
return int2oct(0x41), 0
|
||||
m, b, e = value
|
||||
if not m:
|
||||
return null, 0
|
||||
if b == 10:
|
||||
return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), 0
|
||||
elif b == 2:
|
||||
fo = 0x80 # binary encoding
|
||||
ms, m, encbase, e = self._chooseEncBase(value)
|
||||
if ms < 0: # mantissa sign
|
||||
fo = fo | 0x40 # sign bit
|
||||
# exponenta & mantissa normalization
|
||||
if encbase == 2:
|
||||
while m & 0x1 == 0:
|
||||
m >>= 1
|
||||
e += 1
|
||||
elif encbase == 8:
|
||||
while m & 0x7 == 0:
|
||||
m >>= 3
|
||||
e += 1
|
||||
fo |= 0x10
|
||||
else: # encbase = 16
|
||||
while m & 0xf == 0:
|
||||
m >>= 4
|
||||
e += 1
|
||||
fo |= 0x20
|
||||
sf = 0 # scale factor
|
||||
while m & 0x1 == 0:
|
||||
m >>= 1
|
||||
sf += 1
|
||||
if sf > 3:
|
||||
raise error.PyAsn1Error('Scale factor overflow') # bug if raised
|
||||
fo |= sf << 2
|
||||
eo = null
|
||||
if e == 0 or e == -1:
|
||||
eo = int2oct(e&0xff)
|
||||
else:
|
||||
while e not in (0, -1):
|
||||
eo = int2oct(e&0xff) + eo
|
||||
e >>= 8
|
||||
if e == 0 and eo and oct2int(eo[0]) & 0x80:
|
||||
eo = int2oct(0) + eo
|
||||
if e == -1 and eo and not (oct2int(eo[0]) & 0x80):
|
||||
eo = int2oct(0xff) + eo
|
||||
n = len(eo)
|
||||
if n > 0xff:
|
||||
raise error.PyAsn1Error('Real exponent overflow')
|
||||
if n == 1:
|
||||
pass
|
||||
elif n == 2:
|
||||
fo |= 1
|
||||
elif n == 3:
|
||||
fo |= 2
|
||||
else:
|
||||
fo |= 3
|
||||
eo = int2oct(n&0xff) + eo
|
||||
po = null
|
||||
while m:
|
||||
po = int2oct(m&0xff) + po
|
||||
m >>= 8
|
||||
substrate = int2oct(fo) + eo + po
|
||||
return substrate, 0
|
||||
else:
|
||||
raise error.PyAsn1Error('Prohibited Real base %s' % b)
|
||||
|
||||
class SequenceEncoder(AbstractItemEncoder):
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
value.setDefaultComponents()
|
||||
value.verifySizeSpec()
|
||||
substrate = null; idx = len(value)
|
||||
while idx > 0:
|
||||
idx = idx - 1
|
||||
if value[idx] is None: # Optional component
|
||||
continue
|
||||
component = value.getDefaultComponentByPosition(idx)
|
||||
if component is not None and component == value[idx]:
|
||||
continue
|
||||
substrate = encodeFun(
|
||||
value[idx], defMode, maxChunkSize
|
||||
) + substrate
|
||||
return substrate, 1
|
||||
|
||||
class SequenceOfEncoder(AbstractItemEncoder):
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
value.verifySizeSpec()
|
||||
substrate = null; idx = len(value)
|
||||
while idx > 0:
|
||||
idx = idx - 1
|
||||
substrate = encodeFun(
|
||||
value[idx], defMode, maxChunkSize
|
||||
) + substrate
|
||||
return substrate, 1
|
||||
|
||||
class ChoiceEncoder(AbstractItemEncoder):
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
return encodeFun(value.getComponent(), defMode, maxChunkSize), 1
|
||||
|
||||
class AnyEncoder(OctetStringEncoder):
|
||||
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
|
||||
return value.asOctets(), defMode == 0
|
||||
|
||||
tagMap = {
|
||||
eoo.endOfOctets.tagSet: EndOfOctetsEncoder(),
|
||||
univ.Boolean.tagSet: BooleanEncoder(),
|
||||
univ.Integer.tagSet: IntegerEncoder(),
|
||||
univ.BitString.tagSet: BitStringEncoder(),
|
||||
univ.OctetString.tagSet: OctetStringEncoder(),
|
||||
univ.Null.tagSet: NullEncoder(),
|
||||
univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(),
|
||||
univ.Enumerated.tagSet: IntegerEncoder(),
|
||||
univ.Real.tagSet: RealEncoder(),
|
||||
# Sequence & Set have same tags as SequenceOf & SetOf
|
||||
univ.SequenceOf.tagSet: SequenceOfEncoder(),
|
||||
univ.SetOf.tagSet: SequenceOfEncoder(),
|
||||
univ.Choice.tagSet: ChoiceEncoder(),
|
||||
# character string types
|
||||
char.UTF8String.tagSet: OctetStringEncoder(),
|
||||
char.NumericString.tagSet: OctetStringEncoder(),
|
||||
char.PrintableString.tagSet: OctetStringEncoder(),
|
||||
char.TeletexString.tagSet: OctetStringEncoder(),
|
||||
char.VideotexString.tagSet: OctetStringEncoder(),
|
||||
char.IA5String.tagSet: OctetStringEncoder(),
|
||||
char.GraphicString.tagSet: OctetStringEncoder(),
|
||||
char.VisibleString.tagSet: OctetStringEncoder(),
|
||||
char.GeneralString.tagSet: OctetStringEncoder(),
|
||||
char.UniversalString.tagSet: OctetStringEncoder(),
|
||||
char.BMPString.tagSet: OctetStringEncoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
|
||||
useful.GeneralizedTime.tagSet: OctetStringEncoder(),
|
||||
useful.UTCTime.tagSet: OctetStringEncoder()
|
||||
}
|
||||
|
||||
# Type-to-codec map for ambiguous ASN.1 types
|
||||
typeMap = {
|
||||
univ.Set.typeId: SequenceEncoder(),
|
||||
univ.SetOf.typeId: SequenceOfEncoder(),
|
||||
univ.Sequence.typeId: SequenceEncoder(),
|
||||
univ.SequenceOf.typeId: SequenceOfEncoder(),
|
||||
univ.Choice.typeId: ChoiceEncoder(),
|
||||
univ.Any.typeId: AnyEncoder()
|
||||
}
|
||||
|
||||
class Encoder:
|
||||
supportIndefLength = True
|
||||
def __init__(self, tagMap, typeMap={}):
|
||||
self.__tagMap = tagMap
|
||||
self.__typeMap = typeMap
|
||||
|
||||
def __call__(self, value, defMode=True, maxChunkSize=0):
|
||||
if not defMode and not self.supportIndefLength:
|
||||
raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
|
||||
debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.prettyPrintType(), value.prettyPrint()))
|
||||
tagSet = value.getTagSet()
|
||||
if len(tagSet) > 1:
|
||||
concreteEncoder = explicitlyTaggedItemEncoder
|
||||
else:
|
||||
if value.typeId is not None and value.typeId in self.__typeMap:
|
||||
concreteEncoder = self.__typeMap[value.typeId]
|
||||
elif tagSet in self.__tagMap:
|
||||
concreteEncoder = self.__tagMap[tagSet]
|
||||
else:
|
||||
tagSet = value.baseTagSet
|
||||
if tagSet in self.__tagMap:
|
||||
concreteEncoder = self.__tagMap[tagSet]
|
||||
else:
|
||||
raise Error('No encoder for %s' % (value,))
|
||||
debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet))
|
||||
substrate = concreteEncoder.encode(
|
||||
self, value, defMode, maxChunkSize
|
||||
)
|
||||
debug.logger & debug.flagEncoder and debug.logger('built %s octets of substrate: %s\nencoder completed' % (len(substrate), debug.hexdump(substrate)))
|
||||
return substrate
|
||||
|
||||
encode = Encoder(tagMap, typeMap)
|
||||
@@ -1,8 +0,0 @@
|
||||
from pyasn1.type import base, tag
|
||||
|
||||
class EndOfOctets(base.AbstractSimpleAsn1Item):
|
||||
defaultValue = 0
|
||||
tagSet = tag.initTagSet(
|
||||
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00)
|
||||
)
|
||||
endOfOctets = EndOfOctets()
|
||||
@@ -1,36 +0,0 @@
|
||||
# CER decoder
|
||||
from pyasn1 import error
|
||||
from pyasn1.codec.ber import decoder
|
||||
from pyasn1.compat.octets import oct2int
|
||||
from pyasn1.type import univ
|
||||
|
||||
|
||||
class BooleanDecoder(decoder.AbstractSimpleDecoder):
|
||||
protoComponent = univ.Boolean(0)
|
||||
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
|
||||
state, decodeFun, substrateFun):
|
||||
head, tail = substrate[:length], substrate[length:]
|
||||
if not head or length != 1:
|
||||
raise error.PyAsn1Error('Not single-octet Boolean payload')
|
||||
byte = oct2int(head[0])
|
||||
# CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while
|
||||
# BER allows any non-zero value as TRUE; cf. sections 8.2.2. and 11.1
|
||||
# in http://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
|
||||
if byte == 0xff:
|
||||
value = 1
|
||||
elif byte == 0x00:
|
||||
value = 0
|
||||
else:
|
||||
raise error.PyAsn1Error('Unexpected Boolean payload: %s' % byte)
|
||||
return self._createComponent(asn1Spec, tagSet, value), tail
|
||||
|
||||
tagMap = decoder.tagMap.copy()
|
||||
tagMap.update({
|
||||
univ.Boolean.tagSet: BooleanDecoder()
|
||||
})
|
||||
|
||||
typeMap = decoder.typeMap
|
||||
|
||||
class Decoder(decoder.Decoder): pass
|
||||
|
||||
decode = Decoder(tagMap, decoder.typeMap)
|
||||
@@ -1,131 +0,0 @@
|
||||
# CER encoder
from pyasn1 import error
from pyasn1.codec.ber import encoder
from pyasn1.compat.octets import int2oct, str2octs, null
from pyasn1.type import univ
from pyasn1.type import useful


class BooleanEncoder(encoder.IntegerEncoder):
    """CER Boolean: FALSE is a single 0x00 octet, TRUE is 0xFF."""
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        if client == 0:
            substrate = int2oct(0)
        else:
            substrate = int2oct(255)
        # Second element 0 means "primitive encoding" to the caller.
        return substrate, 0

class BitStringEncoder(encoder.BitStringEncoder):
    """BER BitString encoder pinned to CER's mandatory 1000-octet chunking."""
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        return encoder.BitStringEncoder.encodeValue(
            self, encodeFun, client, defMode, 1000
        )

class OctetStringEncoder(encoder.OctetStringEncoder):
    """BER OctetString encoder pinned to CER's mandatory 1000-octet chunking."""
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        return encoder.OctetStringEncoder.encodeValue(
            self, encodeFun, client, defMode, 1000
        )

class RealEncoder(encoder.RealEncoder):
    """Real encoder that always drops the floating-point base choice logic."""
    def _chooseEncBase(self, value):
        m, b, e = value
        return self._dropFloatingPoint(m, b, e)

# specialized GeneralStringEncoder here

class GeneralizedTimeEncoder(OctetStringEncoder):
    """CER GeneralizedTime: must be in UTC ('Z'-terminated), no offsets."""
    zchar = str2octs('Z')
    pluschar = str2octs('+')
    minuschar = str2octs('-')
    zero = str2octs('0')
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        octets = client.asOctets()
        # This breaks too many existing data items
        # if '.' not in octets:
        #     raise error.PyAsn1Error('Format must include fraction of second: %r' % octets)
        if len(octets) < 15:
            # Fixed message: this encoder handles GeneralizedTime, the old
            # text incorrectly said 'Bad UTC time length'.
            raise error.PyAsn1Error('Bad GeneralizedTime length: %r' % octets)
        # Offsets are forbidden: the value must be expressed in UTC.
        if self.pluschar in octets or self.minuschar in octets:
            raise error.PyAsn1Error('Must be UTC time: %r' % octets)
        if octets[-1] != self.zchar[0]:
            raise error.PyAsn1Error('Missing timezone specifier: %r' % octets)
        return encoder.OctetStringEncoder.encodeValue(
            self, encodeFun, client, defMode, 1000
        )

class UTCTimeEncoder(encoder.OctetStringEncoder):
    """CER UTCTime: normalized to the 13-octet 'YYMMDDHHMMSSZ' form."""
    zchar = str2octs('Z')
    pluschar = str2octs('+')
    minuschar = str2octs('-')
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        octets = client.asOctets()
        if self.pluschar in octets or self.minuschar in octets:
            raise error.PyAsn1Error('Must be UTC time: %r' % octets)
        # Append the mandatory 'Z' suffix if it is missing.
        if octets and octets[-1] != self.zchar[0]:
            client = client.clone(octets + self.zchar)
        if len(client) != 13:
            raise error.PyAsn1Error('Bad UTC time length: %r' % client)
        return encoder.OctetStringEncoder.encodeValue(
            self, encodeFun, client, defMode, 1000
        )

class SetOfEncoder(encoder.SequenceOfEncoder):
    """Encoder for both Set and SetOf, ordering components as CER requires."""
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        if isinstance(client, univ.SequenceAndSetBase):
            client.setDefaultComponents()
        client.verifySizeSpec()
        substrate = null; idx = len(client)
        # This is certainly a hack but how else do I distinguish SetOf
        # from Set if they have the same tags&constraints?
        if isinstance(client, univ.SequenceAndSetBase):
            # Set: skip absent optionals and default-valued components,
            # then sort the rest by tag.
            comps = []
            while idx > 0:
                idx = idx - 1
                if client[idx] is None:  # Optional component
                    continue
                if client.getDefaultComponentByPosition(idx) == client[idx]:
                    continue
                comps.append(client[idx])
            comps.sort(key=lambda x: isinstance(x, univ.Choice) and \
                       x.getMinTagSet() or x.getTagSet())
            for c in comps:
                substrate += encodeFun(c, defMode, maxChunkSize)
        else:
            # SetOf: encode each component, then sort the encodings
            # lexicographically (X.690 SET OF ordering).
            compSubs = []
            while idx > 0:
                idx = idx - 1
                compSubs.append(
                    encodeFun(client[idx], defMode, maxChunkSize)
                )
            compSubs.sort()  # perhaps padding's not needed
            substrate = null
            for compSub in compSubs:
                substrate += compSub
        # Second element 1 means "constructed encoding" to the caller.
        return substrate, 1

# CER encoding maps: BER maps with the CER-specific overrides above.
tagMap = encoder.tagMap.copy()
tagMap.update({
    univ.Boolean.tagSet: BooleanEncoder(),
    univ.BitString.tagSet: BitStringEncoder(),
    univ.OctetString.tagSet: OctetStringEncoder(),
    univ.Real.tagSet: RealEncoder(),
    useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
    useful.UTCTime.tagSet: UTCTimeEncoder(),
    univ.SetOf().tagSet: SetOfEncoder()  # conflicts with Set
})

typeMap = encoder.typeMap.copy()
typeMap.update({
    univ.Set.typeId: SetOfEncoder(),
    univ.SetOf.typeId: SetOfEncoder()
})

class Encoder(encoder.Encoder):
    """CER serializer; defaults to indefinite length (defMode=False)."""
    def __call__(self, client, defMode=False, maxChunkSize=0):
        return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)

encode = Encoder(tagMap, typeMap)

# EncoderFactory queries class instance and builds a map of tags -> encoders
|
||||
@@ -1,9 +0,0 @@
|
||||
# DER decoder
from pyasn1.codec.cer import decoder

# DER is a restriction of CER, so the CER decoding maps are reused as-is.
tagMap = decoder.tagMap
typeMap = decoder.typeMap

class Decoder(decoder.Decoder):
    # DER permits only the definite length form.
    supportIndefLength = False

# Module-level convenience callable: decode(substrate) -> (asn1Object, rest).
decode = Decoder(tagMap, typeMap)
|
||||
@@ -1,33 +0,0 @@
|
||||
# DER encoder
from pyasn1 import error
from pyasn1.codec.cer import encoder
from pyasn1.type import univ


class SetOfEncoder(encoder.SetOfEncoder):
    """CER SetOf encoder specialized with DER component ordering."""
    def _cmpSetComponents(self, c1, c2):
        # Components are ordered by their (effective) tag sets.
        tagSet1 = isinstance(c1, univ.Choice) and \
                  c1.getEffectiveTagSet() or c1.getTagSet()
        tagSet2 = isinstance(c2, univ.Choice) and \
                  c2.getEffectiveTagSet() or c2.getTagSet()
        # The cmp() builtin was removed in Python 3; this expression is the
        # standard 2/3-compatible replacement yielding -1, 0 or 1 (it relies
        # on the same tag-set ordering comparisons cmp() used).
        return (tagSet1 > tagSet2) - (tagSet1 < tagSet2)

tagMap = encoder.tagMap.copy()
tagMap.update({
    # Overload CER encoders with BER ones (a bit hackerish XXX)
    univ.BitString.tagSet: encoder.encoder.BitStringEncoder(),
    univ.OctetString.tagSet: encoder.encoder.OctetStringEncoder(),
    # Set & SetOf have same tags
    univ.SetOf().tagSet: SetOfEncoder()
})

typeMap = encoder.typeMap

class Encoder(encoder.Encoder):
    """DER serializer; only the definite length form is legal."""
    supportIndefLength = False
    def __call__(self, client, defMode=True, maxChunkSize=0):
        if not defMode:
            raise error.PyAsn1Error('DER forbids indefinite length mode')
        return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)

encode = Encoder(tagMap, typeMap)
|
||||
@@ -1,10 +0,0 @@
|
||||
# Polyfill for the bin() builtin, which first appeared in Python 2.6.
from sys import version_info

if version_info[0:2] < (2, 6):
    def bin(x):
        """Return the binary representation of integer *x* as a '0b...' string.

        Matches the Python 2.6+ builtin, including the '-0b...' form for
        negative numbers (the previous fallback produced strings like
        '0b-5' for negative input instead of '-0b101').
        """
        if x < 0:
            # The builtin renders negatives as a minus sign plus the magnitude.
            return '-' + bin(-x)
        if x <= 1:
            return '0b' + str(x)
        return bin(x >> 1) + str(x & 1)
else:
    bin = bin  # modern interpreter: just re-export the builtin
|
||||
@@ -1,22 +0,0 @@
|
||||
# Byte/string abstraction layer: exposes one uniform "octets" API on both
# Python 2 (where octets are plain str) and Python 3 (where they are bytes).
from sys import version_info

if version_info[0] <= 2:
    # Python 2: octets are plain (byte) strings.
    int2oct = chr
    null = ''
    oct2int = ord

    def ints2octs(values):
        """Build an octet string from a sequence of integers."""
        return ''.join([int2oct(v) for v in values])

    def octs2ints(octets):
        """Expand an octet string into a list of integers."""
        return [oct2int(o) for o in octets]

    def str2octs(text):
        return text

    def octs2str(octets):
        return octets

    def isOctetsType(s):
        return isinstance(s, str)

    def isStringType(s):
        return isinstance(s, (str, unicode))
else:
    # Python 3: octets are bytes.
    ints2octs = bytes
    null = ints2octs()

    def int2oct(value):
        """Build a single-octet bytes object from an integer."""
        return ints2octs((value,))

    def oct2int(octet):
        # Iterating bytes already yields ints on Python 3.
        return octet

    def octs2ints(octets):
        """Expand a bytes object into a list of integers."""
        return list(octets)

    def str2octs(text):
        return text.encode()

    def octs2str(octets):
        return octets.decode()

    def isOctetsType(s):
        return isinstance(s, bytes)

    def isStringType(s):
        return isinstance(s, str)
|
||||
@@ -1,110 +0,0 @@
|
||||
import logging

from pyasn1 import __version__
from pyasn1 import error
from pyasn1.compat.octets import octs2ints

# Bit flags selecting which subsystems emit debug traces.
flagNone = 0x0000
flagEncoder = 0x0001
flagDecoder = 0x0002
flagAll = 0xffff

# User-facing flag names -> bit values.
flagMap = {
    'encoder': flagEncoder,
    'decoder': flagDecoder,
    'all': flagAll
}

class Printer:
    """Debug message sink backed by the stdlib logging package."""
    def __init__(self, logger=None, handler=None, formatter=None):
        if logger is None:
            logger = logging.getLogger('pyasn1')
        logger.setLevel(logging.DEBUG)
        if handler is None:
            handler = logging.StreamHandler()
        if formatter is None:
            formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s')
        handler.setFormatter(formatter)
        handler.setLevel(logging.DEBUG)
        logger.addHandler(handler)
        self.__logger = logger

    def __call__(self, msg): self.__logger.debug(msg)
    def __str__(self): return '<python built-in logging>'

if hasattr(logging, 'NullHandler'):
    NullHandler = logging.NullHandler
else:
    # Python 2.6 and older
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

class Debug:
    """Conditional debug logger; truthiness of (debug & flag) gates tracing."""
    defaultPrinter = None
    def __init__(self, *flags, **options):
        self._flags = flagNone
        if options.get('printer') is not None:
            self._printer = options.get('printer')
        elif self.defaultPrinter is not None:
            self._printer = self.defaultPrinter
        # NOTE(review): as written, this if/else unconditionally re-assigns
        # self._printer, clobbering any 'printer' option or defaultPrinter
        # chosen above -- looks unintended; confirm against upstream pyasn1.
        if 'loggerName' in options:
            # route our logs to parent logger
            self._printer = Printer(
                logger=logging.getLogger(options['loggerName']),
                handler=NullHandler()
            )
        else:
            self._printer = Printer()
        self('running pyasn1 version %s' % __version__)
        for f in flags:
            # A leading '!' or '~' disables the named category.
            inverse = f and f[0] in ('!', '~')
            if inverse:
                f = f[1:]
            try:
                if inverse:
                    self._flags &= ~flagMap[f]
                else:
                    self._flags |= flagMap[f]
            except KeyError:
                raise error.PyAsn1Error('bad debug flag %s' % f)

            self('debug category \'%s\' %s' % (f, inverse and 'disabled' or 'enabled'))

    def __str__(self):
        return 'logger %s, flags %x' % (self._printer, self._flags)

    def __call__(self, msg):
        self._printer(msg)

    def __and__(self, flag):
        return self._flags & flag

    def __rand__(self, flag):
        return flag & self._flags

# Module-level debug switch: 0 (off) or a Debug instance installed by users.
logger = 0

def setLogger(l):
    """Install *l* (a Debug instance or 0) as the module-wide debug logger."""
    global logger
    logger = l

def hexdump(octets):
    """Render an octet string as a hex dump, 16 octets per numbered line."""
    return ' '.join(
        [ '%s%.2X' % (n%16 == 0 and ('\n%.5d: ' % n) or '', x)
          for n,x in zip(range(len(octets)), octs2ints(octets)) ]
    )

class Scope:
    """Dotted-path stack used to label nested decode/encode positions."""
    def __init__(self):
        self._list = []

    def __str__(self): return '.'.join(self._list)

    def push(self, token):
        self._list.append(token)

    def pop(self):
        return self._list.pop()

# Shared module-level scope tracker.
scope = Scope()
|
||||
@@ -1,3 +0,0 @@
|
||||
class PyAsn1Error(Exception):
    """Root of the pyasn1 exception hierarchy."""


class ValueConstraintError(PyAsn1Error):
    """A value failed an ASN.1 subtype constraint check."""


class SubstrateUnderrunError(PyAsn1Error):
    """A decoder ran out of input substrate mid-item."""
|
||||
@@ -1,280 +0,0 @@
|
||||
# Base classes for ASN.1 types
import sys

from pyasn1 import error
from pyasn1.type import constraint, tagmap, tag


class Asn1Item: pass

class Asn1ItemBase(Asn1Item):
    """Common machinery shared by all ASN.1 type/value objects: a tag set
    and a subtype (constraints) specification."""
    # Set of tags for this ASN.1 type
    tagSet = tag.TagSet()

    # A list of constraint.Constraint instances for checking values
    subtypeSpec = constraint.ConstraintsIntersection()

    # Used for ambiguous ASN.1 types identification
    typeId = None

    def __init__(self, tagSet=None, subtypeSpec=None):
        # Fall back to the class-level defaults when not overridden.
        if tagSet is None:
            self._tagSet = self.tagSet
        else:
            self._tagSet = tagSet
        if subtypeSpec is None:
            self._subtypeSpec = self.subtypeSpec
        else:
            self._subtypeSpec = subtypeSpec

    def _verifySubtypeSpec(self, value, idx=None):
        # Re-raise constraint failures with the offending class name appended.
        try:
            self._subtypeSpec(value, idx)
        except error.PyAsn1Error:
            c, i, t = sys.exc_info()
            raise c('%s at %s' % (i, self.__class__.__name__))

    def getSubtypeSpec(self): return self._subtypeSpec

    def getTagSet(self): return self._tagSet
    def getEffectiveTagSet(self): return self._tagSet  # used by untagged types
    def getTagMap(self): return tagmap.TagMap({self._tagSet: self})

    def isSameTypeWith(self, other, matchTags=True, matchConstraints=True):
        # Identity short-circuits; otherwise compare tags and/or constraints.
        return self is other or \
               (not matchTags or \
                self._tagSet == other.getTagSet()) and \
               (not matchConstraints or \
                self._subtypeSpec==other.getSubtypeSpec())

    def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True):
        """Returns true if argument is a ASN1 subtype of ourselves"""
        return (not matchTags or \
                self._tagSet.isSuperTagSetOf(other.getTagSet())) and \
               (not matchConstraints or \
                (self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec())))

class NoValue:
    """Sentinel standing in for 'no value assigned'; any access raises."""
    def __getattr__(self, attr):
        raise error.PyAsn1Error('No value for %s()' % attr)
    def __getitem__(self, i):
        raise error.PyAsn1Error('No value')
    def __repr__(self): return '%s()' % self.__class__.__name__

# Shared singleton sentinel.
noValue = NoValue()

# Base class for "simple" ASN.1 objects. These are immutable.
class AbstractSimpleAsn1Item(Asn1ItemBase):
    defaultValue = noValue
    def __init__(self, value=None, tagSet=None, subtypeSpec=None):
        Asn1ItemBase.__init__(self, tagSet, subtypeSpec)
        if value is None or value is noValue:
            value = self.defaultValue
        if value is None or value is noValue:
            # Still no value: store the sentinel; hash is computed lazily.
            self.__hashedValue = value = noValue
        else:
            # Normalize, constraint-check and pre-hash the concrete value.
            value = self.prettyIn(value)
            self._verifySubtypeSpec(value)
            self.__hashedValue = hash(value)
        self._value = value
        self._len = None

    def __repr__(self):
        # Only mention pieces that differ from the class-level defaults.
        r = []
        if self._value is not self.defaultValue:
            r.append(self.prettyOut(self._value))
        if self._tagSet is not self.tagSet:
            r.append('tagSet=%r' % (self._tagSet,))
        if self._subtypeSpec is not self.subtypeSpec:
            r.append('subtypeSpec=%r' % (self._subtypeSpec,))
        return '%s(%s)' % (self.__class__.__name__, ', '.join(r))

    def __str__(self): return str(self._value)
    def __eq__(self, other):
        return self is other and True or self._value == other
    def __ne__(self, other): return self._value != other
    def __lt__(self, other): return self._value < other
    def __le__(self, other): return self._value <= other
    def __gt__(self, other): return self._value > other
    def __ge__(self, other): return self._value >= other
    if sys.version_info[0] <= 2:
        def __nonzero__(self): return bool(self._value)
    else:
        def __bool__(self): return bool(self._value)
    def __hash__(self):
        return self.__hashedValue is noValue and hash(noValue) or self.__hashedValue

    def hasValue(self):
        return not isinstance(self._value, NoValue)

    def clone(self, value=None, tagSet=None, subtypeSpec=None):
        """Return a copy with any of value/tagSet/subtypeSpec replaced."""
        if value is None and tagSet is None and subtypeSpec is None:
            return self
        if value is None:
            value = self._value
        if tagSet is None:
            tagSet = self._tagSet
        if subtypeSpec is None:
            subtypeSpec = self._subtypeSpec
        return self.__class__(value, tagSet, subtypeSpec)

    def subtype(self, value=None, implicitTag=None, explicitTag=None,
                subtypeSpec=None):
        """Derive a subtype: tags are added to, constraints are merged with,
        the current ones (unlike clone(), which replaces them)."""
        if value is None:
            value = self._value
        if implicitTag is not None:
            tagSet = self._tagSet.tagImplicitly(implicitTag)
        elif explicitTag is not None:
            tagSet = self._tagSet.tagExplicitly(explicitTag)
        else:
            tagSet = self._tagSet
        if subtypeSpec is None:
            subtypeSpec = self._subtypeSpec
        else:
            subtypeSpec = subtypeSpec + self._subtypeSpec
        return self.__class__(value, tagSet, subtypeSpec)

    # Hooks for subclasses to convert to/from their internal representation.
    def prettyIn(self, value): return value
    def prettyOut(self, value): return str(value)

    def prettyPrint(self, scope=0):
        if self.hasValue():
            return self.prettyOut(self._value)
        else:
            return '<no value>'

    # XXX Compatibility stub
    def prettyPrinter(self, scope=0): return self.prettyPrint(scope)

    def prettyPrintType(self, scope=0):
        return '%s -> %s' % (self.getTagSet(), self.__class__.__name__)

#
# Constructed types:
# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice
# * ASN1 types and values are represented by Python class instances
# * Value initialization is made for defaulted components only
# * Primary method of component addressing is by-position. Data model for base
#   type is Python sequence. Additional type-specific addressing methods
#   may be implemented for particular types.
# * SequenceOf and SetOf types do not implement any additional methods
# * Sequence, Set and Choice types also implement by-identifier addressing
# * Sequence, Set and Choice types also implement by-asn1-type (tag) addressing
# * Sequence and Set types may include optional and defaulted
#   components
# * Constructed types hold a reference to component types used for value
#   verification and ordering.
# * Component type is a scalar type for SequenceOf/SetOf types and a list
#   of types for Sequence/Set/Choice.
#

class AbstractConstructedAsn1Item(Asn1ItemBase):
    componentType = None
    sizeSpec = constraint.ConstraintsIntersection()
    def __init__(self, componentType=None, tagSet=None,
                 subtypeSpec=None, sizeSpec=None):
        Asn1ItemBase.__init__(self, tagSet, subtypeSpec)
        if componentType is None:
            self._componentType = self.componentType
        else:
            self._componentType = componentType
        if sizeSpec is None:
            self._sizeSpec = self.sizeSpec
        else:
            self._sizeSpec = sizeSpec
        # Per-position component values; counter tracks explicitly-set slots.
        self._componentValues = []
        self._componentValuesSet = 0

    def __repr__(self):
        r = []
        if self._componentType is not self.componentType:
            r.append('componentType=%r' % (self._componentType,))
        if self._tagSet is not self.tagSet:
            r.append('tagSet=%r' % (self._tagSet,))
        if self._subtypeSpec is not self.subtypeSpec:
            r.append('subtypeSpec=%r' % (self._subtypeSpec,))
        r = '%s(%s)' % (self.__class__.__name__, ', '.join(r))
        if self._componentValues:
            r += '.setComponents(%s)' % ', '.join([repr(x) for x in self._componentValues])
        return r

    def __eq__(self, other):
        return self is other and True or self._componentValues == other
    def __ne__(self, other): return self._componentValues != other
    def __lt__(self, other): return self._componentValues < other
    def __le__(self, other): return self._componentValues <= other
    def __gt__(self, other): return self._componentValues > other
    def __ge__(self, other): return self._componentValues >= other
    if sys.version_info[0] <= 2:
        def __nonzero__(self): return bool(self._componentValues)
    else:
        def __bool__(self): return bool(self._componentValues)

    def getComponentTagMap(self):
        raise error.PyAsn1Error('Method not implemented')

    def _cloneComponentValues(self, myClone, cloneValueFlag): pass

    def clone(self, tagSet=None, subtypeSpec=None, sizeSpec=None,
              cloneValueFlag=None):
        """Return a copy; component values are copied only on cloneValueFlag."""
        if tagSet is None:
            tagSet = self._tagSet
        if subtypeSpec is None:
            subtypeSpec = self._subtypeSpec
        if sizeSpec is None:
            sizeSpec = self._sizeSpec
        r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec)
        if cloneValueFlag:
            self._cloneComponentValues(r, cloneValueFlag)
        return r

    def subtype(self, implicitTag=None, explicitTag=None, subtypeSpec=None,
                sizeSpec=None, cloneValueFlag=None):
        """Derive a subtype: tags are added, constraints and size specs are
        merged with the current ones."""
        if implicitTag is not None:
            tagSet = self._tagSet.tagImplicitly(implicitTag)
        elif explicitTag is not None:
            tagSet = self._tagSet.tagExplicitly(explicitTag)
        else:
            tagSet = self._tagSet
        if subtypeSpec is None:
            subtypeSpec = self._subtypeSpec
        else:
            subtypeSpec = subtypeSpec + self._subtypeSpec
        if sizeSpec is None:
            sizeSpec = self._sizeSpec
        else:
            sizeSpec = sizeSpec + self._sizeSpec
        r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec)
        if cloneValueFlag:
            self._cloneComponentValues(r, cloneValueFlag)
        return r

    def _verifyComponent(self, idx, value): pass

    def verifySizeSpec(self): self._sizeSpec(self)

    def getComponentByPosition(self, idx):
        raise error.PyAsn1Error('Method not implemented')
    def setComponentByPosition(self, idx, value, verifyConstraints=True):
        raise error.PyAsn1Error('Method not implemented')

    def setComponents(self, *args, **kwargs):
        """Bulk-assign components by position (*args) and by name (**kwargs);
        returns self for chaining."""
        for idx in range(len(args)):
            self[idx] = args[idx]
        for k in kwargs:
            self[k] = kwargs[k]
        return self

    def getComponentType(self): return self._componentType

    def setDefaultComponents(self): pass

    def __getitem__(self, idx): return self.getComponentByPosition(idx)
    def __setitem__(self, idx, value): self.setComponentByPosition(idx, value)

    def __len__(self): return len(self._componentValues)

    def clear(self):
        self._componentValues = []
        self._componentValuesSet = 0
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
# ASN.1 "character string" types
#
# Each class below is an OctetString re-tagged (implicitly) with the
# universal tag number assigned to that string type by X.680; the classes
# carrying an `encoding` attribute also fix the codec used for text
# conversion.
from pyasn1.type import univ, tag

class NumericString(univ.OctetString):
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18)
    )

class PrintableString(univ.OctetString):
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19)
    )

class TeletexString(univ.OctetString):
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20)
    )

# T61String is the historical alias for TeletexString.
class T61String(TeletexString): pass

class VideotexString(univ.OctetString):
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21)
    )

class IA5String(univ.OctetString):
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22)
    )

class GraphicString(univ.OctetString):
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25)
    )

class VisibleString(univ.OctetString):
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26)
    )

# ISO646String is the historical alias for VisibleString.
class ISO646String(VisibleString): pass

class GeneralString(univ.OctetString):
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27)
    )

class UniversalString(univ.OctetString):
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28)
    )
    encoding = "utf-32-be"

class BMPString(univ.OctetString):
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30)
    )
    encoding = "utf-16-be"

class UTF8String(univ.OctetString):
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
    )
    encoding = "utf-8"
|
||||
@@ -1,202 +0,0 @@
|
||||
#
# ASN.1 subtype constraints classes.
#
# Constraints are relatively rare, but every ASN1 object
# is doing checks all the time for whether they have any
# constraints and whether they are applicable to the object.
#
# What we're going to do is define objects/functions that
# can be called unconditionally if they are present, and that
# are simply not present if there are no constraints.
#
# Original concept and code by Mike C. Fletcher.
#
import sys

from pyasn1.type import error


class AbstractConstraint:
    """Abstract base-class for constraint objects

    Constraints should be stored in a simple sequence in the
    namespace of their client Asn1Item sub-classes.
    """
    def __init__(self, *values):
        self._valueMap = {}
        self._setValues(values)
        self.__hashedValues = None
    def __call__(self, value, idx=None):
        # Re-raise constraint failures annotated with this constraint's repr.
        try:
            self._testValue(value, idx)
        except error.ValueConstraintError:
            raise error.ValueConstraintError(
                '%s failed at: \"%s\"' % (self, sys.exc_info()[1])
            )
    def __repr__(self):
        return '%s(%s)' % (
            self.__class__.__name__,
            ', '.join([repr(x) for x in self._values])
        )
    def __eq__(self, other):
        return self is other and True or self._values == other
    def __ne__(self, other): return self._values != other
    def __lt__(self, other): return self._values < other
    def __le__(self, other): return self._values <= other
    def __gt__(self, other): return self._values > other
    def __ge__(self, other): return self._values >= other
    if sys.version_info[0] <= 2:
        def __nonzero__(self): return bool(self._values)
    else:
        def __bool__(self): return bool(self._values)

    def __hash__(self):
        # Hash is computed lazily and cached (constraints are immutable).
        if self.__hashedValues is None:
            self.__hashedValues = hash((self.__class__.__name__, self._values))
        return self.__hashedValues

    def _setValues(self, values): self._values = values
    def _testValue(self, value, idx):
        raise error.ValueConstraintError(value)

    # Constraints derivation logic
    def getValueMap(self): return self._valueMap
    def isSuperTypeOf(self, otherConstraint):
        return self in otherConstraint.getValueMap() or \
               otherConstraint is self or otherConstraint == self
    def isSubTypeOf(self, otherConstraint):
        return otherConstraint in self._valueMap or \
               otherConstraint is self or otherConstraint == self

class SingleValueConstraint(AbstractConstraint):
    """Value must be part of defined values constraint"""
    def _testValue(self, value, idx):
        # XXX index vals for performance?
        if value not in self._values:
            raise error.ValueConstraintError(value)

class ContainedSubtypeConstraint(AbstractConstraint):
    """Value must satisfy all of defined set of constraints"""
    def _testValue(self, value, idx):
        for c in self._values:
            c(value, idx)

class ValueRangeConstraint(AbstractConstraint):
    """Value must be within start and stop values (inclusive)"""
    def _testValue(self, value, idx):
        if value < self.start or value > self.stop:
            raise error.ValueConstraintError(value)

    def _setValues(self, values):
        # Exactly two values expected: (start, stop), with start <= stop.
        if len(values) != 2:
            raise error.PyAsn1Error(
                '%s: bad constraint values' % (self.__class__.__name__,)
            )
        self.start, self.stop = values
        if self.start > self.stop:
            raise error.PyAsn1Error(
                '%s: screwed constraint values (start > stop): %s > %s' % (
                    self.__class__.__name__,
                    self.start, self.stop
                )
            )
        AbstractConstraint._setValues(self, values)

class ValueSizeConstraint(ValueRangeConstraint):
    """len(value) must be within start and stop values (inclusive)"""
    def _testValue(self, value, idx):
        l = len(value)
        if l < self.start or l > self.stop:
            raise error.ValueConstraintError(value)

class PermittedAlphabetConstraint(SingleValueConstraint):
    """Every character of the value must come from the permitted alphabet."""
    def _setValues(self, values):
        # Flatten the given strings/sequences into one tuple of characters.
        self._values = ()
        for v in values:
            self._values = self._values + tuple(v)

    def _testValue(self, value, idx):
        for v in value:
            if v not in self._values:
                raise error.ValueConstraintError(value)

# This is a bit kludgy, meaning two operation modes within a single constraint
class InnerTypeConstraint(AbstractConstraint):
    """Value must satisfy type and presence constraints"""
    def _testValue(self, value, idx):
        if self.__singleTypeConstraint:
            self.__singleTypeConstraint(value)
        elif self.__multipleTypeConstraint:
            if idx not in self.__multipleTypeConstraint:
                raise error.ValueConstraintError(value)
            constraint, status = self.__multipleTypeConstraint[idx]
            if status == 'ABSENT':  # XXX presence is not checked!
                raise error.ValueConstraintError(value)
            constraint(value)

    def _setValues(self, values):
        # Tuples select per-position mode; a bare constraint selects
        # single-type mode.
        self.__multipleTypeConstraint = {}
        self.__singleTypeConstraint = None
        for v in values:
            if isinstance(v, tuple):
                self.__multipleTypeConstraint[v[0]] = v[1], v[2]
            else:
                self.__singleTypeConstraint = v
        AbstractConstraint._setValues(self, values)

# Boolean ops on constraints

class ConstraintsExclusion(AbstractConstraint):
    """Value must not fit the single constraint"""
    def _testValue(self, value, idx):
        try:
            self._values[0](value, idx)
        except error.ValueConstraintError:
            return
        else:
            raise error.ValueConstraintError(value)

    def _setValues(self, values):
        if len(values) != 1:
            raise error.PyAsn1Error('Single constraint expected')
        AbstractConstraint._setValues(self, values)

class AbstractConstraintSet(AbstractConstraint):
    """Base for composite constraints holding a collection of constraints."""
    def __getitem__(self, idx): return self._values[idx]

    def __add__(self, value): return self.__class__(self, value)
    def __radd__(self, value): return self.__class__(self, value)

    def __len__(self): return len(self._values)

    # Constraints inclusion in sets

    def _setValues(self, values):
        self._values = values
        for v in values:
            self._valueMap[v] = 1
            self._valueMap.update(v.getValueMap())

class ConstraintsIntersection(AbstractConstraintSet):
    """Value must satisfy all constraints"""
    def _testValue(self, value, idx):
        for v in self._values:
            v(value, idx)

class ConstraintsUnion(AbstractConstraintSet):
    """Value must satisfy at least one constraint"""
    def _testValue(self, value, idx):
        for v in self._values:
            try:
                v(value, idx)
            except error.ValueConstraintError:
                pass
            else:
                return
        raise error.ValueConstraintError(
            'all of %s failed for \"%s\"' % (self._values, value)
        )

# XXX
# add tests for type check
|
||||
@@ -1,3 +0,0 @@
|
||||
from pyasn1.error import PyAsn1Error


class ValueConstraintError(PyAsn1Error):
    """Raised when a value fails an ASN.1 subtype constraint check."""
|
||||
@@ -1,151 +0,0 @@
|
||||
# NamedType specification for constructed types
|
||||
import sys
|
||||
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import tagmap
|
||||
|
||||
|
||||
class NamedType:
    """Pairs a field name with an ASN.1 type for use in constructed types.

    Behaves as a two-item sequence (name, type), which also drives the
    comparison and hashing protocol below.
    """
    # Flags overridden by the OPTIONAL/DEFAULT subclasses.
    isOptional = 0
    isDefaulted = 0

    def __init__(self, name, t):
        self._name = name
        self._asn1Type = t

    def __repr__(self):
        return '%s(%r, %r)' % (
            self.__class__.__name__, self._name, self._asn1Type
        )

    # All ordering/equality is delegated to the (name, type) tuple view.
    def __eq__(self, other): return tuple(self) == tuple(other)
    def __ne__(self, other): return tuple(self) != tuple(other)
    def __lt__(self, other): return tuple(self) < tuple(other)
    def __le__(self, other): return tuple(self) <= tuple(other)
    def __gt__(self, other): return tuple(self) > tuple(other)
    def __ge__(self, other): return tuple(self) >= tuple(other)
    def __hash__(self): return hash(tuple(self))

    def getType(self): return self._asn1Type
    def getName(self): return self._name

    def __getitem__(self, idx):
        # Sequence protocol: index 0 is the name, index 1 is the type.
        if idx == 0:
            return self._name
        if idx == 1:
            return self._asn1Type
        raise IndexError()
|
||||
|
||||
class OptionalNamedType(NamedType):
|
||||
isOptional = 1
|
||||
class DefaultedNamedType(NamedType):
|
||||
isDefaulted = 1
|
||||
|
||||
class NamedTypes:
|
||||
def __init__(self, *namedTypes):
|
||||
self.__namedTypes = namedTypes
|
||||
self.__namedTypesLen = len(self.__namedTypes)
|
||||
self.__minTagSet = None
|
||||
self.__tagToPosIdx = {}; self.__nameToPosIdx = {}
|
||||
self.__tagMap = { False: None, True: None }
|
||||
self.__ambigiousTypes = {}
|
||||
|
||||
def __repr__(self):
|
||||
return '%s(%s)' % (
|
||||
self.__class__.__name__,
|
||||
', '.join([ repr(x) for x in self.__namedTypes ])
|
||||
)
|
||||
def __eq__(self, other): return tuple(self) == tuple(other)
|
||||
def __ne__(self, other): return tuple(self) != tuple(other)
|
||||
def __lt__(self, other): return tuple(self) < tuple(other)
|
||||
def __le__(self, other): return tuple(self) <= tuple(other)
|
||||
def __gt__(self, other): return tuple(self) > tuple(other)
|
||||
def __ge__(self, other): return tuple(self) >= tuple(other)
|
||||
def __hash__(self): return hash(tuple(self))
|
||||
|
||||
def __getitem__(self, idx): return self.__namedTypes[idx]
|
||||
|
||||
if sys.version_info[0] <= 2:
|
||||
def __nonzero__(self): return bool(self.__namedTypesLen)
|
||||
else:
|
||||
def __bool__(self): return bool(self.__namedTypesLen)
|
||||
def __len__(self): return self.__namedTypesLen
|
||||
|
||||
def clone(self): return self.__class__(*self.__namedTypes)
|
||||
|
||||
def getTypeByPosition(self, idx):
|
||||
if idx < 0 or idx >= self.__namedTypesLen:
|
||||
raise error.PyAsn1Error('Type position out of range')
|
||||
else:
|
||||
return self.__namedTypes[idx].getType()
|
||||
|
||||
def getPositionByType(self, tagSet):
|
||||
if not self.__tagToPosIdx:
|
||||
idx = self.__namedTypesLen
|
||||
while idx > 0:
|
||||
idx = idx - 1
|
||||
tagMap = self.__namedTypes[idx].getType().getTagMap()
|
||||
for t in tagMap.getPosMap():
|
||||
if t in self.__tagToPosIdx:
|
||||
raise error.PyAsn1Error('Duplicate type %s' % (t,))
|
||||
self.__tagToPosIdx[t] = idx
|
||||
try:
|
||||
return self.__tagToPosIdx[tagSet]
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('Type %s not found' % (tagSet,))
|
||||
|
||||
def getNameByPosition(self, idx):
|
||||
try:
|
||||
return self.__namedTypes[idx].getName()
|
||||
except IndexError:
|
||||
raise error.PyAsn1Error('Type position out of range')
|
||||
def getPositionByName(self, name):
|
||||
if not self.__nameToPosIdx:
|
||||
idx = self.__namedTypesLen
|
||||
while idx > 0:
|
||||
idx = idx - 1
|
||||
n = self.__namedTypes[idx].getName()
|
||||
if n in self.__nameToPosIdx:
|
||||
raise error.PyAsn1Error('Duplicate name %s' % (n,))
|
||||
self.__nameToPosIdx[n] = idx
|
||||
try:
|
||||
return self.__nameToPosIdx[name]
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('Name %s not found' % (name,))
|
||||
|
||||
def __buildAmbigiousTagMap(self):
|
||||
ambigiousTypes = ()
|
||||
idx = self.__namedTypesLen
|
||||
while idx > 0:
|
||||
idx = idx - 1
|
||||
t = self.__namedTypes[idx]
|
||||
if t.isOptional or t.isDefaulted:
|
||||
ambigiousTypes = (t, ) + ambigiousTypes
|
||||
else:
|
||||
ambigiousTypes = (t, )
|
||||
self.__ambigiousTypes[idx] = NamedTypes(*ambigiousTypes)
|
||||
|
||||
def getTagMapNearPosition(self, idx):
|
||||
if not self.__ambigiousTypes: self.__buildAmbigiousTagMap()
|
||||
try:
|
||||
return self.__ambigiousTypes[idx].getTagMap()
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('Type position out of range')
|
||||
|
||||
def getPositionNearType(self, tagSet, idx):
|
||||
if not self.__ambigiousTypes: self.__buildAmbigiousTagMap()
|
||||
try:
|
||||
return idx+self.__ambigiousTypes[idx].getPositionByType(tagSet)
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('Type position out of range')
|
||||
|
||||
def genMinTagSet(self):
|
||||
if self.__minTagSet is None:
|
||||
for t in self.__namedTypes:
|
||||
__type = t.getType()
|
||||
tagSet = getattr(__type,'getMinTagSet',__type.getTagSet)()
|
||||
if self.__minTagSet is None or tagSet < self.__minTagSet:
|
||||
self.__minTagSet = tagSet
|
||||
return self.__minTagSet
|
||||
|
||||
def getTagMap(self, uniq=False):
|
||||
if self.__tagMap[uniq] is None:
|
||||
tagMap = tagmap.TagMap()
|
||||
for nt in self.__namedTypes:
|
||||
tagMap = tagMap.clone(
|
||||
nt.getType(), nt.getType().getTagMap(), uniq
|
||||
)
|
||||
self.__tagMap[uniq] = tagMap
|
||||
return self.__tagMap[uniq]
|
||||
@@ -1,58 +0,0 @@
|
||||
# ASN.1 named integers
|
||||
from pyasn1 import error
|
||||
|
||||
__all__ = [ 'NamedValues' ]
|
||||
|
||||
class NamedValues:
|
||||
def __init__(self, *namedValues):
|
||||
self.nameToValIdx = {}; self.valToNameIdx = {}
|
||||
self.namedValues = ()
|
||||
automaticVal = 1
|
||||
for namedValue in namedValues:
|
||||
if isinstance(namedValue, tuple):
|
||||
name, val = namedValue
|
||||
else:
|
||||
name = namedValue
|
||||
val = automaticVal
|
||||
if name in self.nameToValIdx:
|
||||
raise error.PyAsn1Error('Duplicate name %s' % (name,))
|
||||
self.nameToValIdx[name] = val
|
||||
if val in self.valToNameIdx:
|
||||
raise error.PyAsn1Error('Duplicate value %s=%s' % (name, val))
|
||||
self.valToNameIdx[val] = name
|
||||
self.namedValues = self.namedValues + ((name, val),)
|
||||
automaticVal = automaticVal + 1
|
||||
|
||||
def __repr__(self):
|
||||
return '%s(%s)' % (self.__class__.__name__, ', '.join([repr(x) for x in self.namedValues]))
|
||||
|
||||
def __str__(self): return str(self.namedValues)
|
||||
|
||||
def __eq__(self, other): return tuple(self) == tuple(other)
|
||||
def __ne__(self, other): return tuple(self) != tuple(other)
|
||||
def __lt__(self, other): return tuple(self) < tuple(other)
|
||||
def __le__(self, other): return tuple(self) <= tuple(other)
|
||||
def __gt__(self, other): return tuple(self) > tuple(other)
|
||||
def __ge__(self, other): return tuple(self) >= tuple(other)
|
||||
def __hash__(self): return hash(tuple(self))
|
||||
|
||||
def getName(self, value):
|
||||
if value in self.valToNameIdx:
|
||||
return self.valToNameIdx[value]
|
||||
|
||||
def getValue(self, name):
|
||||
if name in self.nameToValIdx:
|
||||
return self.nameToValIdx[name]
|
||||
|
||||
def __getitem__(self, i): return self.namedValues[i]
|
||||
def __len__(self): return len(self.namedValues)
|
||||
|
||||
def __add__(self, namedValues):
|
||||
return self.__class__(*self.namedValues + namedValues)
|
||||
def __radd__(self, namedValues):
|
||||
return self.__class__(*namedValues + tuple(self))
|
||||
|
||||
def clone(self, *namedValues):
|
||||
return self.__class__(*tuple(self) + namedValues)
|
||||
|
||||
# XXX clone/subtype?
|
||||
@@ -1,129 +0,0 @@
|
||||
# ASN.1 types tags
|
||||
from operator import getitem
|
||||
|
||||
from pyasn1 import error
|
||||
|
||||
tagClassUniversal = 0x00
|
||||
tagClassApplication = 0x40
|
||||
tagClassContext = 0x80
|
||||
tagClassPrivate = 0xC0
|
||||
|
||||
tagFormatSimple = 0x00
|
||||
tagFormatConstructed = 0x20
|
||||
|
||||
tagCategoryImplicit = 0x01
|
||||
tagCategoryExplicit = 0x02
|
||||
tagCategoryUntagged = 0x04
|
||||
|
||||
class Tag:
|
||||
def __init__(self, tagClass, tagFormat, tagId):
|
||||
if tagId < 0:
|
||||
raise error.PyAsn1Error(
|
||||
'Negative tag ID (%s) not allowed' % (tagId,)
|
||||
)
|
||||
self.__tag = (tagClass, tagFormat, tagId)
|
||||
self.uniq = (tagClass, tagId)
|
||||
self.__hashedUniqTag = hash(self.uniq)
|
||||
|
||||
def __str__(self):
|
||||
return '[%s:%s:%s]' % self.__tag
|
||||
|
||||
def __repr__(self):
|
||||
return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % (
|
||||
(self.__class__.__name__,) + self.__tag
|
||||
)
|
||||
# These is really a hotspot -- expose public "uniq" attribute to save on
|
||||
# function calls
|
||||
def __eq__(self, other): return self.uniq == other.uniq
|
||||
def __ne__(self, other): return self.uniq != other.uniq
|
||||
def __lt__(self, other): return self.uniq < other.uniq
|
||||
def __le__(self, other): return self.uniq <= other.uniq
|
||||
def __gt__(self, other): return self.uniq > other.uniq
|
||||
def __ge__(self, other): return self.uniq >= other.uniq
|
||||
def __hash__(self): return self.__hashedUniqTag
|
||||
def __getitem__(self, idx): return self.__tag[idx]
|
||||
def __and__(self, otherTag):
|
||||
(tagClass, tagFormat, tagId) = otherTag
|
||||
return self.__class__(
|
||||
self.__tag&tagClass, self.__tag&tagFormat, self.__tag&tagId
|
||||
)
|
||||
def __or__(self, otherTag):
|
||||
(tagClass, tagFormat, tagId) = otherTag
|
||||
return self.__class__(
|
||||
self.__tag[0]|tagClass,
|
||||
self.__tag[1]|tagFormat,
|
||||
self.__tag[2]|tagId
|
||||
)
|
||||
def asTuple(self): return self.__tag # __getitem__() is slow
|
||||
|
||||
class TagSet:
|
||||
def __init__(self, baseTag=(), *superTags):
|
||||
self.__baseTag = baseTag
|
||||
self.__superTags = superTags
|
||||
self.__hashedSuperTags = hash(superTags)
|
||||
_uniq = ()
|
||||
for t in superTags:
|
||||
_uniq = _uniq + t.uniq
|
||||
self.uniq = _uniq
|
||||
self.__lenOfSuperTags = len(superTags)
|
||||
|
||||
def __str__(self):
|
||||
return self.__superTags and '+'.join([str(x) for x in self.__superTags]) or '[untagged]'
|
||||
|
||||
def __repr__(self):
|
||||
return '%s(%s)' % (
|
||||
self.__class__.__name__,
|
||||
'(), ' + ', '.join([repr(x) for x in self.__superTags])
|
||||
)
|
||||
|
||||
def __add__(self, superTag):
|
||||
return self.__class__(
|
||||
self.__baseTag, *self.__superTags + (superTag,)
|
||||
)
|
||||
def __radd__(self, superTag):
|
||||
return self.__class__(
|
||||
self.__baseTag, *(superTag,) + self.__superTags
|
||||
)
|
||||
|
||||
def tagExplicitly(self, superTag):
|
||||
tagClass, tagFormat, tagId = superTag
|
||||
if tagClass == tagClassUniversal:
|
||||
raise error.PyAsn1Error(
|
||||
'Can\'t tag with UNIVERSAL-class tag'
|
||||
)
|
||||
if tagFormat != tagFormatConstructed:
|
||||
superTag = Tag(tagClass, tagFormatConstructed, tagId)
|
||||
return self + superTag
|
||||
|
||||
def tagImplicitly(self, superTag):
|
||||
tagClass, tagFormat, tagId = superTag
|
||||
if self.__superTags:
|
||||
superTag = Tag(tagClass, self.__superTags[-1][1], tagId)
|
||||
return self[:-1] + superTag
|
||||
|
||||
def getBaseTag(self): return self.__baseTag
|
||||
def __getitem__(self, idx):
|
||||
if isinstance(idx, slice):
|
||||
return self.__class__(
|
||||
self.__baseTag, *getitem(self.__superTags, idx)
|
||||
)
|
||||
return self.__superTags[idx]
|
||||
def __eq__(self, other): return self.uniq == other.uniq
|
||||
def __ne__(self, other): return self.uniq != other.uniq
|
||||
def __lt__(self, other): return self.uniq < other.uniq
|
||||
def __le__(self, other): return self.uniq <= other.uniq
|
||||
def __gt__(self, other): return self.uniq > other.uniq
|
||||
def __ge__(self, other): return self.uniq >= other.uniq
|
||||
def __hash__(self): return self.__hashedSuperTags
|
||||
def __len__(self): return self.__lenOfSuperTags
|
||||
def isSuperTagSetOf(self, tagSet):
|
||||
if len(tagSet) < self.__lenOfSuperTags:
|
||||
return
|
||||
idx = self.__lenOfSuperTags - 1
|
||||
while idx >= 0:
|
||||
if self.__superTags[idx] != tagSet[idx]:
|
||||
return
|
||||
idx = idx - 1
|
||||
return 1
|
||||
|
||||
def initTagSet(tag): return TagSet(tag, tag)
|
||||
@@ -1,66 +0,0 @@
|
||||
from pyasn1 import error
|
||||
|
||||
class TagMap:
|
||||
def __init__(self, posMap={}, negMap={}, defType=None):
|
||||
self.__posMap = posMap.copy()
|
||||
self.__negMap = negMap.copy()
|
||||
self.__defType = defType
|
||||
|
||||
def __contains__(self, tagSet):
|
||||
return tagSet in self.__posMap or \
|
||||
self.__defType is not None and tagSet not in self.__negMap
|
||||
|
||||
def __getitem__(self, tagSet):
|
||||
if tagSet in self.__posMap:
|
||||
return self.__posMap[tagSet]
|
||||
elif tagSet in self.__negMap:
|
||||
raise error.PyAsn1Error('Key in negative map')
|
||||
elif self.__defType is not None:
|
||||
return self.__defType
|
||||
else:
|
||||
raise KeyError()
|
||||
|
||||
def __repr__(self):
|
||||
s = self.__class__.__name__ + '('
|
||||
if self.__posMap:
|
||||
s = s + 'posMap=%r, ' % (self.__posMap,)
|
||||
if self.__negMap:
|
||||
s = s + 'negMap=%r, ' % (self.__negMap,)
|
||||
if self.__defType is not None:
|
||||
s = s + 'defType=%r' % (self.__defType,)
|
||||
return s + ')'
|
||||
|
||||
def __str__(self):
|
||||
s = self.__class__.__name__ + ':\n'
|
||||
if self.__posMap:
|
||||
s = s + 'posMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__posMap.values()])
|
||||
if self.__negMap:
|
||||
s = s + 'negMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__negMap.values()])
|
||||
if self.__defType is not None:
|
||||
s = s + 'defType:\n%s, ' % self.__defType.prettyPrintType()
|
||||
return s
|
||||
|
||||
def clone(self, parentType, tagMap, uniq=False):
|
||||
if self.__defType is not None and tagMap.getDef() is not None:
|
||||
raise error.PyAsn1Error('Duplicate default value at %s' % (self,))
|
||||
if tagMap.getDef() is not None:
|
||||
defType = tagMap.getDef()
|
||||
else:
|
||||
defType = self.__defType
|
||||
|
||||
posMap = self.__posMap.copy()
|
||||
for k in tagMap.getPosMap():
|
||||
if uniq and k in posMap:
|
||||
raise error.PyAsn1Error('Duplicate positive key %s' % (k,))
|
||||
posMap[k] = parentType
|
||||
|
||||
negMap = self.__negMap.copy()
|
||||
negMap.update(tagMap.getNegMap())
|
||||
|
||||
return self.__class__(
|
||||
posMap, negMap, defType,
|
||||
)
|
||||
|
||||
def getPosMap(self): return self.__posMap.copy()
|
||||
def getNegMap(self): return self.__negMap.copy()
|
||||
def getDef(self): return self.__defType
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,17 +0,0 @@
|
||||
# ASN.1 "useful" types
|
||||
from pyasn1.type import char, tag
|
||||
|
||||
class ObjectDescriptor(char.GraphicString):
|
||||
tagSet = char.GraphicString.tagSet.tagImplicitly(
|
||||
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7)
|
||||
)
|
||||
|
||||
class GeneralizedTime(char.VisibleString):
|
||||
tagSet = char.VisibleString.tagSet.tagImplicitly(
|
||||
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24)
|
||||
)
|
||||
|
||||
class UTCTime(char.VisibleString):
|
||||
tagSet = char.VisibleString.tagSet.tagImplicitly(
|
||||
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23)
|
||||
)
|
||||
@@ -154,21 +154,25 @@ class NBNS:
|
||||
opcode = (code >> 11) & 0x0F
|
||||
flags = (code >> 4) & 0x7F
|
||||
rcode = code & 0x0F
|
||||
numnames = struct.unpack('B', data[self.HEADER_STRUCT_SIZE + 44])[0]
|
||||
|
||||
if numnames > 0:
|
||||
ret = [ ]
|
||||
offset = self.HEADER_STRUCT_SIZE + 45
|
||||
try:
|
||||
numnames = struct.unpack('B', data[self.HEADER_STRUCT_SIZE + 44])[0]
|
||||
|
||||
for i in range(0, numnames):
|
||||
mynme = data[offset:offset + 15]
|
||||
mynme = mynme.strip()
|
||||
ret.append(( mynme, ord(data[offset+15]) ))
|
||||
offset += 18
|
||||
if numnames > 0:
|
||||
ret = [ ]
|
||||
offset = self.HEADER_STRUCT_SIZE + 45
|
||||
|
||||
return trn_id, ret
|
||||
else:
|
||||
return trn_id, None
|
||||
for i in range(0, numnames):
|
||||
mynme = data[offset:offset + 15]
|
||||
mynme = mynme.strip()
|
||||
ret.append(( mynme, ord(data[offset+15]) ))
|
||||
offset += 18
|
||||
|
||||
return trn_id, ret
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
return trn_id, None
|
||||
|
||||
#
|
||||
# Contributed by Jason Anderson
|
||||
@@ -21,6 +21,7 @@ class SMBConnection(SMB):
|
||||
Create a new SMBConnection instance.
|
||||
|
||||
*username* and *password* are the user credentials required to authenticate the underlying SMB connection with the remote server.
|
||||
*password* can be a string or a callable returning a string.
|
||||
File operations can only be proceeded after the connection has been authenticated successfully.
|
||||
|
||||
Note that you need to call *connect* method to actually establish the SMB connection to the remote server and perform authentication.
|
||||
@@ -71,6 +72,15 @@ class SMBConnection(SMB):
|
||||
raise NotConnectedError('Server disconnected')
|
||||
total_sent = total_sent + sent
|
||||
|
||||
#
|
||||
# Support for "with" context
|
||||
#
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *args):
|
||||
self.close()
|
||||
|
||||
#
|
||||
# Misc Properties
|
||||
#
|
||||
@@ -389,10 +399,12 @@ class SMBConnection(SMB):
|
||||
|
||||
return results[0]
|
||||
|
||||
def deleteFiles(self, service_name, path_file_pattern, timeout = 30):
|
||||
def deleteFiles(self, service_name, path_file_pattern, delete_matching_folders = False, timeout = 30):
|
||||
"""
|
||||
Delete one or more regular files. It supports the use of wildcards in file names, allowing for deletion of multiple files in a single request.
|
||||
|
||||
If delete_matching_folders is True, immediate sub-folders that match the path_file_pattern will be deleted recursively.
|
||||
|
||||
:param string/unicode service_name: Contains the name of the shared folder.
|
||||
:param string/unicode path_file_pattern: The pathname of the file(s) to be deleted, relative to the service_name.
|
||||
Wildcards may be used in th filename component of the path.
|
||||
@@ -411,23 +423,27 @@ class SMBConnection(SMB):
|
||||
|
||||
self.is_busy = True
|
||||
try:
|
||||
self._deleteFiles(service_name, path_file_pattern, cb, eb, timeout = timeout)
|
||||
self._deleteFiles(service_name, path_file_pattern, delete_matching_folders, cb, eb, timeout = timeout)
|
||||
while self.is_busy:
|
||||
self._pollForNetBIOSPacket(timeout)
|
||||
finally:
|
||||
self.is_busy = False
|
||||
|
||||
def resetFileAttributes(self, service_name, path_file_pattern, timeout = 30):
|
||||
def resetFileAttributes(self, service_name, path_file_pattern, file_attributes = ATTR_NORMAL, timeout = 30):
|
||||
"""
|
||||
Reset file attributes of one or more regular files or folders.
|
||||
It supports the use of wildcards in file names, allowing for unlocking of multiple files/folders in a single request.
|
||||
This function is very helpful when deleting files/folders that are read-only.
|
||||
Note: this function is currently only implemented for SMB2! Technically, it sets the FILE_ATTRIBUTE_NORMAL flag, therefore clearing all other flags. (See https://msdn.microsoft.com/en-us/library/cc232110.aspx for further information)
|
||||
By default, it sets the ATTR_NORMAL flag, therefore clearing all other flags.
|
||||
(See https://msdn.microsoft.com/en-us/library/cc232110.aspx for further information)
|
||||
|
||||
Note: this function is currently only implemented for SMB2!
|
||||
|
||||
:param string/unicode service_name: Contains the name of the shared folder.
|
||||
:param string/unicode path_file_pattern: The pathname of the file(s) to be deleted, relative to the service_name.
|
||||
Wildcards may be used in the filename component of the path.
|
||||
If your path/filename contains non-English characters, you must pass in an unicode string.
|
||||
:param int file_attributes: The desired file attributes to set. Defaults to `ATTR_NORMAL`.
|
||||
:return: None
|
||||
"""
|
||||
if not self.sock:
|
||||
@@ -442,7 +458,7 @@ class SMBConnection(SMB):
|
||||
|
||||
self.is_busy = True
|
||||
try:
|
||||
self._resetFileAttributes(service_name, path_file_pattern, cb, eb, timeout = timeout)
|
||||
self._resetFileAttributes(service_name, path_file_pattern, cb, eb, file_attributes, timeout = timeout)
|
||||
while self.is_busy:
|
||||
self._pollForNetBIOSPacket(timeout)
|
||||
finally:
|
||||
@@ -592,17 +608,8 @@ class SMBConnection(SMB):
|
||||
raise ex
|
||||
|
||||
type_, flags, length = struct.unpack('>BBH', data)
|
||||
if type_ == 0x0:
|
||||
# This is a Direct TCP packet
|
||||
# The length is specified in the header from byte 8. (0-indexed)
|
||||
# we read a structure assuming NBT, so to get the real length
|
||||
# combine the length and flag fields together
|
||||
length = length + (flags << 16)
|
||||
else:
|
||||
# This is a NetBIOS over TCP (NBT) packet
|
||||
# The length is specified in the header from byte 16. (0-indexed)
|
||||
if flags & 0x01:
|
||||
length = length | 0x10000
|
||||
if flags & 0x01:
|
||||
length = length | 0x10000
|
||||
|
||||
read_len = length
|
||||
while read_len > 0:
|
||||
@@ -44,12 +44,15 @@ class SMBHandler(urllib2.BaseHandler):
|
||||
passwd = passwd or ''
|
||||
myname = MACHINE_NAME or self.generateClientMachineName()
|
||||
|
||||
n = NetBIOS()
|
||||
names = n.queryIPForName(host)
|
||||
if names:
|
||||
server_name = names[0]
|
||||
else:
|
||||
raise urllib2.URLError('SMB error: Hostname does not reply back with its machine name')
|
||||
server_name,host = host.split(',') if ',' in host else [None,host]
|
||||
|
||||
if server_name is None:
|
||||
n = NetBIOS()
|
||||
names = n.queryIPForName(host)
|
||||
if names:
|
||||
server_name = names[0]
|
||||
else:
|
||||
raise urllib2.URLError('SMB error: Hostname does not reply back with its machine name')
|
||||
|
||||
path, attrs = splitattr(req.get_selector())
|
||||
if path.startswith('/'):
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,6 @@
|
||||
|
||||
import types, hmac, binascii, struct, random
|
||||
import types, hmac, binascii, struct, random, string
|
||||
from .utils.rc4 import RC4_encrypt
|
||||
from utils.pyDes import des
|
||||
|
||||
try:
|
||||
@@ -58,14 +59,14 @@ NTLM_Negotiate56 = 0x80000000
|
||||
|
||||
NTLM_FLAGS = NTLM_NegotiateUnicode | \
|
||||
NTLM_RequestTarget | \
|
||||
NTLM_NegotiateSign | \
|
||||
NTLM_NegotiateNTLM | \
|
||||
NTLM_NegotiateAlwaysSign | \
|
||||
NTLM_NegotiateExtendedSecurity | \
|
||||
NTLM_NegotiateTargetInfo | \
|
||||
NTLM_NegotiateVersion | \
|
||||
NTLM_Negotiate128 | \
|
||||
NTLM_NegotiateKeyExchange | \
|
||||
NTLM_Negotiate56
|
||||
NTLM_NegotiateKeyExchange
|
||||
|
||||
def generateNegotiateMessage():
|
||||
"""
|
||||
@@ -81,7 +82,7 @@ def generateNegotiateMessage():
|
||||
return s
|
||||
|
||||
|
||||
def generateAuthenticateMessage(challenge_flags, nt_response, lm_response, session_key, user, domain = 'WORKGROUP', workstation = 'LOCALHOST'):
|
||||
def generateAuthenticateMessage(challenge_flags, nt_response, lm_response, request_session_key, user, domain = 'WORKGROUP', workstation = 'LOCALHOST'):
|
||||
"""
|
||||
References:
|
||||
===========
|
||||
@@ -90,6 +91,13 @@ def generateAuthenticateMessage(challenge_flags, nt_response, lm_response, sessi
|
||||
FORMAT = '<8sIHHIHHIHHIHHIHHIHHII'
|
||||
FORMAT_SIZE = struct.calcsize(FORMAT)
|
||||
|
||||
# [MS-NLMP]: 3.1.5.1.2
|
||||
# http://grutz.jingojango.net/exploits/davenport-ntlm.html
|
||||
session_key = session_signing_key = request_session_key
|
||||
if challenge_flags & NTLM_NegotiateKeyExchange:
|
||||
session_signing_key = "".join([ random.choice(string.digits+string.ascii_letters) for _ in range(16) ]).encode('ascii')
|
||||
session_key = RC4_encrypt(request_session_key, session_signing_key)
|
||||
|
||||
lm_response_length = len(lm_response)
|
||||
lm_response_offset = FORMAT_SIZE
|
||||
nt_response_length = len(nt_response)
|
||||
@@ -125,7 +133,7 @@ def generateAuthenticateMessage(challenge_flags, nt_response, lm_response, sessi
|
||||
session_key_length, session_key_length, session_key_offset,
|
||||
auth_flags)
|
||||
|
||||
return s + lm_response + nt_response + padding + domain_unicode + user_unicode + workstation_unicode + session_key
|
||||
return s + lm_response + nt_response + padding + domain_unicode + user_unicode + workstation_unicode + session_key, session_signing_key
|
||||
|
||||
|
||||
def decodeChallengeMessage(ntlm_data):
|
||||
@@ -50,12 +50,9 @@ SMB2_COMMAND_NAMES = {
|
||||
}
|
||||
|
||||
# Values for dialect_revision field in SMB2NegotiateResponse class
|
||||
SMB2_DIALECT_2 = 0x0202 # 2.0.2 - First SMB2 version
|
||||
SMB2_DIALECT_21 = 0x0210 # 2.1 - Windows 7
|
||||
SMB2_DIALET_30 = 0x0300 # 3.0 - Windows 8
|
||||
SMB2_DIALECT_302 = 0x0302 # 3.0.2 - Windows 8.1
|
||||
SMB2_DIALECT_311 = 0x0311 # 3.1.1 - Windows 10
|
||||
SMB2_DIALECT_2ALL = 0x02FF # Wildcard (for negotiation only)
|
||||
SMB2_DIALECT_2 = 0x0202
|
||||
SMB2_DIALECT_21 = 0x0210
|
||||
SMB2_DIALECT_2ALL = 0x02FF
|
||||
|
||||
# Bit mask for SecurityMode field in SMB2NegotiateResponse class
|
||||
SMB2_NEGOTIATE_SIGNING_ENABLED = 0x0001
|
||||
@@ -69,17 +66,6 @@ SMB2_SHARE_TYPE_PRINTER = 0x03
|
||||
# Bitmask for Capabilities in SMB2TreeConnectResponse class
|
||||
SMB2_SHARE_CAP_DFS = 0x0008
|
||||
|
||||
|
||||
# SMB 2.1 / 3 Capabilities flags
|
||||
SMB2_GLOBAL_CAP_DFS = 0x01
|
||||
SMB2_GLOBAL_CAP_LEASING = 0x02
|
||||
SMB2_GLOBAL_CAP_LARGE_MTU = 0x04
|
||||
SMB2_GLOBAL_CAP_MULTI_CHANNEL = 0x08
|
||||
SMB2_GLOBAL_CAP_PERSISTENT_HANDLES = 0x10
|
||||
SMB2_GLOBAL_CAP_DIRECTORY_LEASING = 0x20
|
||||
SMB2_GLOBAL_CAP_ENCRYPTION = 0x40
|
||||
|
||||
|
||||
# Values for OpLockLevel field in SMB2CreateRequest class
|
||||
SMB2_OPLOCK_LEVEL_NONE = 0x00
|
||||
SMB2_OPLOCK_LEVEL_II = 0x01
|
||||
@@ -1,5 +1,5 @@
|
||||
|
||||
import os, sys, struct, types, logging, binascii, time, uuid
|
||||
import os, sys, struct, types, logging, binascii, time
|
||||
from StringIO import StringIO
|
||||
from smb_structs import ProtocolError
|
||||
from smb_constants import *
|
||||
@@ -23,15 +23,8 @@ class SMB2Message:
|
||||
log = logging.getLogger('SMB.SMB2Message')
|
||||
protocol = 2
|
||||
|
||||
|
||||
def __init__(self, conn = None, payload = None):
|
||||
"""
|
||||
Initialise a new SMB2 Message.
|
||||
conn - reference to the connection, the SMB class
|
||||
payload - the message payload, if any
|
||||
"""
|
||||
def __init__(self, payload = None):
|
||||
self.reset()
|
||||
self.conn = conn
|
||||
if payload:
|
||||
self.payload = payload
|
||||
self.payload.initMessage(self)
|
||||
@@ -67,10 +60,6 @@ class SMB2Message:
|
||||
self.pid = 0
|
||||
self.tid = 0
|
||||
|
||||
# credit related
|
||||
self.credit_charge = 0
|
||||
self.credit_request = 1
|
||||
|
||||
# Not used in this class. Maintained for compatibility with SMBMessage class
|
||||
self.flags2 = 0
|
||||
self.uid = 0
|
||||
@@ -80,66 +69,18 @@ class SMB2Message:
|
||||
def encode(self):
|
||||
"""
|
||||
Encode this SMB2 message into a series of bytes suitable to be embedded with a NetBIOS session message.
|
||||
AssertionError will be raised if this SMB message has not been initialized with an SMB instance
|
||||
AssertionError will be raised if this SMB message has not been initialized with a Payload instance
|
||||
|
||||
The header format is:
|
||||
- Protocol ID
|
||||
- Structure Size
|
||||
- Credit Charge
|
||||
- Status / Channel Sequence
|
||||
- Command
|
||||
- Credit Request / Credit Response
|
||||
- Flags
|
||||
- Next Compound
|
||||
- MessageId
|
||||
- Reserved
|
||||
- TreeId
|
||||
- Session ID
|
||||
- Signature
|
||||
|
||||
@return: a string containing the encoded SMB2 message
|
||||
"""
|
||||
assert self.payload
|
||||
assert self.conn
|
||||
|
||||
self.pid = os.getpid()
|
||||
self.payload.prepare(self)
|
||||
|
||||
# If Connection.Dialect is not "2.0.2" and if Connection.SupportsMultiCredit is TRUE, the
|
||||
# CreditCharge field in the SMB2 header MUST be set to ( 1 + (OutputBufferLength - 1) / 65536 )
|
||||
# This only applies to SMB2ReadRequest, SMB2WriteRequest, SMB2IoctlRequest and SMB2QueryDirectory
|
||||
# See: MS-SMB2 3.2.4.1.5: For all other requests, the client MUST set CreditCharge to 1, even if the
|
||||
# payload size of a request or the anticipated response is greater than 65536.
|
||||
if self.conn.smb2_dialect != SMB2_DIALECT_2:
|
||||
if self.conn.cap_multi_credit:
|
||||
# self.credit_charge will be set by some commands if necessary (Read/Write/Ioctl/QueryDirectory)
|
||||
# If not set, but dialect is SMB 2.1 or above, we must set it to 1
|
||||
if self.credit_charge is 0:
|
||||
self.credit_charge = 1
|
||||
else:
|
||||
# If >= SMB 2.1, but server does not support multi credit operations we must set to 1
|
||||
self.credit_charge = 1
|
||||
|
||||
if self.mid > 3:
|
||||
self.credit_request = 127
|
||||
|
||||
headers_data = struct.pack(self.HEADER_STRUCT_FORMAT,
|
||||
'\xFESMB', # Protocol ID
|
||||
self.HEADER_SIZE, # Structure Size
|
||||
self.credit_charge, # Credit Charge
|
||||
self.status, # Status / Channel Sequence
|
||||
self.command, # Command
|
||||
self.credit_request, # Credit Request / Credit Response
|
||||
self.flags, # Flags
|
||||
) + \
|
||||
struct.pack(self.SYNC_HEADER_STRUCT_FORMAT,
|
||||
self.next_command_offset, # Next Compound
|
||||
self.mid, # Message ID
|
||||
self.pid, # Process ID
|
||||
self.tid, # Tree ID
|
||||
self.session_id, # Session ID
|
||||
self.signature) # Signature
|
||||
'\xFESMB', self.HEADER_SIZE, 0, self.status, self.command, 0, self.flags) + \
|
||||
struct.pack(self.SYNC_HEADER_STRUCT_FORMAT, self.next_command_offset, self.mid, self.pid, self.tid, self.session_id, self.signature)
|
||||
return headers_data + self.data
|
||||
|
||||
def decode(self, buf):
|
||||
@@ -165,8 +106,7 @@ class SMB2Message:
|
||||
self.reset()
|
||||
|
||||
protocol, struct_size, self.credit_charge, self.status, \
|
||||
self.command, self.credit_response, \
|
||||
self.flags = struct.unpack(self.HEADER_STRUCT_FORMAT, buf[:self.HEADER_STRUCT_SIZE])
|
||||
self.command, self.credit_re, self.flags = struct.unpack(self.HEADER_STRUCT_FORMAT, buf[:self.HEADER_STRUCT_SIZE])
|
||||
|
||||
if protocol != '\xFESMB':
|
||||
raise ProtocolError('Invalid 4-byte SMB2 protocol field', buf)
|
||||
@@ -249,53 +189,6 @@ class Structure:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class SMB2NegotiateRequest(Structure):
|
||||
"""
|
||||
2.2.3 SMB2 NEGOTIATE Request
|
||||
The SMB2 NEGOTIATE Request packet is used by the client to notify the server what dialects of the SMB 2 Protocol
|
||||
the client understands. This request is composed of an SMB2 header, as specified in section 2.2.1,
|
||||
followed by this request structure:
|
||||
|
||||
SMB2 Negotiate Request Packet structure:
|
||||
StructureSize (2 bytes)
|
||||
DialectCount (2 bytes)
|
||||
SecurityMode (2 bytes)
|
||||
Reserved (2 bytes)
|
||||
Capabilities (4 bytes)
|
||||
ClientGuid (16 bytes)
|
||||
ClientStartTime (8 bytes):
|
||||
ClientStartTime (8 bytes):
|
||||
Dialects (variable): An array of one or more 16-bit integers
|
||||
|
||||
References:
|
||||
===========
|
||||
- [MS-SMB2]: 2.2.3
|
||||
|
||||
"""
|
||||
|
||||
|
||||
STRUCTURE_FORMAT = "<HHHHI16sQHH"
|
||||
STRUCTURE_SIZE = struct.calcsize(STRUCTURE_FORMAT)
|
||||
|
||||
def initMessage(self, message):
|
||||
Structure.initMessage(self, message)
|
||||
message.command = SMB2_COM_NEGOTIATE
|
||||
|
||||
def prepare(self, message):
|
||||
# TODO! Do we need to save the GUID and present it later in other requests?
|
||||
# The SMB docs don't exactly explain what the guid is for
|
||||
message.data = struct.pack(self.STRUCTURE_FORMAT,
|
||||
36, # Structure size. Must be 36 as mandated by [MS-SMB2] 2.2.3
|
||||
2, # DialectCount
|
||||
0x01, # Security mode
|
||||
0, # Reserved
|
||||
0x00, # Capabilities
|
||||
uuid.uuid4().bytes, # Client GUID
|
||||
0, # Client start time
|
||||
SMB2_DIALECT_2,
|
||||
SMB2_DIALECT_21)
|
||||
|
||||
|
||||
class SMB2NegotiateResponse(Structure):
|
||||
"""
|
||||
Contains information on the SMB2_NEGOTIATE response from server
|
||||
@@ -330,7 +223,6 @@ class SMB2NegotiateResponse(Structure):
|
||||
self.server_start_time = convertFILETIMEtoEpoch(self.server_start_time)
|
||||
self.system_time = convertFILETIMEtoEpoch(self.system_time)
|
||||
self.security_blob = message.raw_data[security_buf_offset:security_buf_offset+security_buf_len]
|
||||
message.conn.smb2_dialect = self.dialect_revision
|
||||
|
||||
|
||||
class SMB2SessionSetupRequest(Structure):
|
||||
@@ -384,7 +276,7 @@ class SMB2SessionSetupResponse(Structure):
|
||||
|
||||
@property
|
||||
def isAnonymousSession(self):
|
||||
return (self.session_flags & 0x0002) > 0 # SMB2_SESSION_FLAG_IS_NULL
|
||||
return (self.session_flags & 0x0002) > 0 # SMB2_SESSION_FLAG_IS_NULL
|
||||
|
||||
def decode(self, message):
|
||||
assert message.command == SMB2_COM_SESSION_SETUP
|
||||
@@ -478,6 +370,7 @@ class SMB2CreateRequest(Structure):
|
||||
|
||||
def prepare(self, message):
|
||||
buf = self.filename.encode('UTF-16LE')
|
||||
filename_len = len(buf)
|
||||
if self.create_context_data:
|
||||
n = SMB2Message.HEADER_SIZE + self.STRUCTURE_SIZE + len(buf)
|
||||
if n % 8 != 0:
|
||||
@@ -505,7 +398,7 @@ class SMB2CreateRequest(Structure):
|
||||
self.create_disp,
|
||||
self.create_options,
|
||||
SMB2Message.HEADER_SIZE + self.STRUCTURE_SIZE, # NameOffset
|
||||
len(self.filename)*2, # NameLength in bytes
|
||||
filename_len, # Length of encoded filename in bytes
|
||||
create_context_offset, # CreateContextOffset
|
||||
len(self.create_context_data) # CreateContextLength
|
||||
) + buf
|
||||
@@ -581,13 +474,6 @@ class SMB2WriteRequest(Structure):
|
||||
0, # WriteChannelInfoLength
|
||||
self.flags) + self.data
|
||||
|
||||
# MS-SMB2 3.2.4.7
|
||||
# If a client requests writing to a file, Connection.Dialect is not "2.0.2", and if
|
||||
# Connection.SupportsMultiCredit is TRUE, the CreditCharge field in the SMB2 header MUST be set
|
||||
# to ( 1 + (Length - 1) / 65536 )
|
||||
if message.conn.smb2_dialect != SMB2_DIALECT_2 and message.conn.cap_multi_credit:
|
||||
message.credit_charge = int(1 + (len(self.data) -1) / 65536)
|
||||
|
||||
|
||||
class SMB2WriteResponse(Structure):
|
||||
"""
|
||||
@@ -646,13 +532,6 @@ class SMB2ReadRequest(Structure):
|
||||
0 # ReadChannelInfoLength
|
||||
) + '\0'
|
||||
|
||||
# MS-SMB2 3.2.4.6
|
||||
# If a client requests reading from a file, Connection.Dialect is not "2.0.2", and if
|
||||
# Connection.SupportsMultiCredit is TRUE, the CreditCharge field in the SMB2 header MUST be set
|
||||
# to ( 1 + (Length - 1) / 65536 )
|
||||
if message.conn.smb2_dialect != SMB2_DIALECT_2 and message.conn.cap_multi_credit:
|
||||
message.credit_charge = int(1 + (self.read_len -1) / 65536)
|
||||
|
||||
|
||||
class SMB2ReadResponse(Structure):
|
||||
"""
|
||||
@@ -709,11 +588,6 @@ class SMB2IoctlRequest(Structure):
|
||||
0 # Reserved
|
||||
) + self.in_data
|
||||
|
||||
# If Connection.SupportsMultiCredit is TRUE, the CreditCharge field in the SMB2 header
|
||||
# SHOULD be set to (max(InputCount, MaxOutputResponse) - 1) / 65536 + 1
|
||||
if message.conn.smb2_dialect != SMB2_DIALECT_2 and message.conn.cap_multi_credit:
|
||||
message.credit_charge = int((max(len(self.in_data), self.max_out_size) - 1) / 65536 + 1)
|
||||
|
||||
|
||||
class SMB2IoctlResponse(Structure):
|
||||
"""
|
||||
@@ -822,12 +696,6 @@ class SMB2QueryDirectoryRequest(Structure):
|
||||
len(self.filename)*2,
|
||||
self.output_buf_len) + self.filename.encode('UTF-16LE')
|
||||
|
||||
# MS-SMB2 3.2.4.17
|
||||
# If Connection.Dialect is not "2.0.2" and if Connection.SupportsMultiCredit is TRUE, the
|
||||
# CreditCharge field in the SMB2 header MUST be set to ( 1 + (OutputBufferLength - 1) / 65536 )
|
||||
if message.conn.smb2_dialect != SMB2_DIALECT_2 and message.conn.cap_multi_credit:
|
||||
message.credit_charge = int(1 + (self.output_buf_len -1) / 65536)
|
||||
|
||||
|
||||
class SMB2QueryDirectoryResponse(Structure):
|
||||
"""
|
||||
@@ -890,12 +758,6 @@ class SMB2QueryInfoRequest(Structure):
|
||||
self.fid # FileId
|
||||
) + self.input_buf
|
||||
|
||||
# MS-SMB2 3.2.4.17
|
||||
# If Connection.Dialect is not "2.0.2" and if Connection.SupportsMultiCredit is TRUE, the
|
||||
# CreditCharge field in the SMB2 header MUST be set to ( 1 + (OutputBufferLength - 1) / 65536 )
|
||||
if message.conn.smb2_dialect != SMB2_DIALECT_2 and message.conn.cap_multi_credit:
|
||||
message.credit_charge = int(1 + ((self.output_buf_len + len(self.input_buf)) -1) / 65536)
|
||||
|
||||
|
||||
class SMB2QueryInfoResponse(Structure):
|
||||
"""
|
||||
@@ -954,12 +816,6 @@ class SMB2SetInfoRequest(Structure):
|
||||
self.fid # FileId
|
||||
) + self.data
|
||||
|
||||
# MS-SMB2 3.2.4.17
|
||||
# If Connection.Dialect is not "2.0.2" and if Connection.SupportsMultiCredit is TRUE, the
|
||||
# CreditCharge field in the SMB2 header MUST be set to ( 1 + (OutputBufferLength - 1) / 65536 )
|
||||
if message.conn.smb2_dialect != SMB2_DIALECT_2 and message.conn.cap_multi_credit:
|
||||
message.credit_charge = int(1 + (len(self.data) -1) / 65536)
|
||||
|
||||
class SMB2SetInfoResponse(Structure):
|
||||
"""
|
||||
References:
|
||||
@@ -10,20 +10,17 @@ SUPPORT_EXTENDED_SECURITY = True
|
||||
# Set to True if you want to enable SMB2 protocol.
|
||||
SUPPORT_SMB2 = True
|
||||
|
||||
# Set to True if you want to enable SMB2.1 and above protocol.
|
||||
SUPPORT_SMB2x = True
|
||||
|
||||
# Supported dialects
|
||||
NT_LAN_MANAGER_DIALECT = 0 # 'NT LM 0.12' is always the first element in the dialect list and must always be included (MS-SMB 2.2.4.5.1)
|
||||
DIALECTS = [ ]
|
||||
for i, ( name, dialect ) in enumerate([ ( 'NT_LAN_MANAGER_DIALECT', 'NT LM 0.12' ), ]):
|
||||
DIALECTS.append(dialect)
|
||||
globals()[name] = i
|
||||
|
||||
DIALECTS2 = [ ]
|
||||
for i, ( name, dialect ) in enumerate([ ( 'SMB2_DIALECT', 'SMB 2.002' ) ]):
|
||||
DIALECTS2.append(dialect)
|
||||
globals()[name] = i + len(DIALECTS)
|
||||
|
||||
# Return the list of support SMB dialects based on the SUPPORT_x constants
|
||||
def init_dialects_list():
|
||||
dialects = [ 'NT LM 0.12' ]
|
||||
if SUPPORT_SMB2:
|
||||
dialects.append('SMB 2.002')
|
||||
if SUPPORT_SMB2x:
|
||||
dialects.append('SMB 2.???')
|
||||
return dialects
|
||||
|
||||
class UnsupportedFeature(Exception):
|
||||
"""
|
||||
@@ -111,9 +108,8 @@ class SMBMessage:
|
||||
log = logging.getLogger('SMB.SMBMessage')
|
||||
protocol = 1
|
||||
|
||||
def __init__(self, conn, payload = None):
|
||||
def __init__(self, payload = None):
|
||||
self.reset()
|
||||
self.conn = conn
|
||||
if payload:
|
||||
self.payload = payload
|
||||
self.payload.initMessage(self)
|
||||
@@ -294,7 +290,10 @@ class ComNegotiateRequest(Payload):
|
||||
def prepare(self, message):
|
||||
assert message.payload == self
|
||||
message.parameters_data = ''
|
||||
message.data = ''.join(map(lambda s: '\x02'+s+'\x00', init_dialects_list()))
|
||||
if SUPPORT_SMB2:
|
||||
message.data = ''.join(map(lambda s: '\x02'+s+'\x00', DIALECTS + DIALECTS2))
|
||||
else:
|
||||
message.data = ''.join(map(lambda s: '\x02'+s+'\x00', DIALECTS))
|
||||
|
||||
|
||||
class ComNegotiateResponse(Payload):
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user