This repository has been archived by the owner on Oct 8, 2023. It is now read-only.

[feat] PEP8 Support #110

Open · wants to merge 8 commits into master
Changes from 3 commits
7 changes: 7 additions & 0 deletions .circleci/config.yml
@@ -32,6 +32,7 @@ jobs:
python3 -m venv venv
. venv/bin/activate
pip install -r requirements.txt
pip install -r dev-requirements.txt

- save_cache:
paths:
@@ -49,6 +50,12 @@
command: |
. venv/bin/activate
python test.py

- run:
name: Code Style Check
command: |
. venv/bin/activate
flake8 vibora
Contributor

This misses the Python syntax error in #32 that flake8 is capable of finding.

Author

All right, I will restore it.

Author

@cclauss I checked this issue; the author has fixed this problem.
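
As background for the thread above: flake8 reports code it cannot parse as E999 (SyntaxError), and E999 is not among the codes ignored in the .flake8 file added by this PR, so a file like the hypothetical sketch below (illustrative only, not taken from this repository) would be flagged by the new flake8 step:

# broken.py - hypothetical module containing the kind of error discussed above
def handler(:          # stray colon in the parameter list, a SyntaxError
    return 1

Running flake8 over such a file reports an E999 SyntaxError at the offending line and exits non-zero, which would fail the Code Style Check job.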

test-3.7:
<<: *test-template
docker:
2 changes: 2 additions & 0 deletions .flake8
@@ -0,0 +1,2 @@
[flake8]
ignore=E501,E401,F403,F401,F811,W293
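
For reference, the suppressed codes are E501 (line too long), E401 (multiple imports on one line), F403 (star import), F401 (imported but unused), F811 (redefinition of an unused name) and W293 (whitespace on a blank line). A minimal sketch of code that each rule would normally flag, assuming default flake8 behaviour (the names below are illustrative, not taken from this repository):

import os, sys                    # E401: multiple imports on one line (F401 as well if unused)
from os.path import *             # F403: star import hides which names are actually used

def handler():
    pass

def handler():                    # F811: redefinition of the unused 'handler' defined above
    pass

# W293 fires on a blank line that contains only spaces or tabs, and E501 fires
# on any line longer than the configured maximum (79 characters by default).
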
1 change: 1 addition & 0 deletions dev-requirements.txt
@@ -0,0 +1 @@
flake8
62 changes: 41 additions & 21 deletions vibora/client/response.py
@@ -22,12 +22,16 @@ class ResponseStatus(Enum):

class Response:

__slots__ = ('_connection', '_headers', '_content', '_parser', '_parser_status', '_cookies',
'_status_code', '_decode', '_chunk_size', '_decoder', '_encoding',
'request', 'url')

def __init__(self, url: str, connection: Connection,
request: Request, chunk_size: int= 1 * 1024 * 1024, decode: bool=True):
__slots__ = ('_connection', '_headers', '_content', '_parser',
'_parser_status', '_cookies', '_status_code', '_decode',
'_chunk_size', '_decoder', '_encoding', 'request', 'url')

def __init__(self,
url: str,
connection: Connection,
request: Request,
chunk_size: int = 1 * 1024 * 1024,
decode: bool = True):
self._connection = connection
self._headers = {}
self._content = bytearray()
@@ -52,7 +56,7 @@ def json(self, *args, loads=None, **kwargs):
loads = json.loads
return loads(self.content.decode(self.encoding), *args, **kwargs)

def text(self, encoding: str=None) -> str:
def text(self, encoding: str = None) -> str:
return self.content.decode(encoding=encoding or self.encoding)

@property
@@ -62,8 +66,9 @@ def status_code(self):
:return:
"""
if self._parser_status == ResponseStatus.PENDING_HEADERS:
raise Exception('Status code not loaded yet. '
'In streaming mode you should manually call load_headers().')
raise Exception(
'Status code not loaded yet. '
'In streaming mode you should manually call load_headers().')
return self._status_code

@property
@@ -73,8 +78,9 @@ def headers(self):
:return:
"""
if self._parser_status == ResponseStatus.PENDING_HEADERS:
raise Exception('Headers not loaded yet. '
'In streaming mode you should manually call load_headers().')
raise Exception(
'Headers not loaded yet. '
'In streaming mode you should manually call load_headers().')
return self._headers

@property
@@ -99,7 +105,8 @@ async def cookies(self):
if self._cookies is None:
self._cookies = CookiesJar()
if self._headers.get('set-cookie'):
self._cookies.add_cookie(Cookie.from_header(self._headers['set-cookie']))
self._cookies.add_cookie(
Cookie.from_header(self._headers['set-cookie']))
return self._cookies

async def read_content(self):
@@ -114,7 +121,8 @@ async def read_content(self):
length = int(self._headers['Content-Length'])
if not self._decoder:
# Skipping the HTTP parser for performance.
self._content.extend(await self._connection.read_exactly(length))
self._content.extend(await
self._connection.read_exactly(length))
self.on_message_complete()
else:
self._parser.feed(await self._connection.read_exactly(length))
@@ -159,18 +167,22 @@ async def _release_connection(self):

:return:
"""
await self._connection.pool.release_connection(self._connection, self._parser.should_keep_alive())
await self._connection.pool.release_connection(
self._connection, self._parser.should_keep_alive())

async def stream(self, chunk_size: int=1*1024*1024, chunk_timeout: int=10,
complete_timeout: int=300):
async def stream(self,
chunk_size: int = 1 * 1024 * 1024,
chunk_timeout: int = 10,
complete_timeout: int = 300):
"""

:param complete_timeout:
:param chunk_timeout:
:param chunk_size:
:return:
"""
if self._parser_status not in (ResponseStatus.PENDING_HEADERS, ResponseStatus.PENDING_BODY):
if self._parser_status not in (ResponseStatus.PENDING_HEADERS,
ResponseStatus.PENDING_BODY):
raise StreamAlreadyConsumed
if self._parser_status == ResponseStatus.PENDING_HEADERS:
await wait_for(self.receive_headers(), chunk_timeout)
@@ -182,7 +194,8 @@ async def stream(self, chunk_size: int=1*1024*1024, chunk_timeout: int=10,
bytes_to_read = min(remaining, chunk_size)
task = self._connection.read_exactly(bytes_to_read)
start_time = time.time()
self._parser.feed(await wait_for(task, min(chunk_timeout, complete_timeout)))
self._parser.feed(await wait_for(
task, min(chunk_timeout, complete_timeout)))
complete_timeout -= time.time() - start_time
remaining -= bytes_to_read
yield bytes(self._content)
@@ -194,9 +207,11 @@ async def stream(self, chunk_size: int=1*1024*1024, chunk_timeout: int=10,
task = self._connection.read_until(b'\r\n')
start_time = time.time()
try:
self._parser.feed(await wait_for(task, min(chunk_timeout, complete_timeout)))
self._parser.feed(await wait_for(
task, min(chunk_timeout, complete_timeout)))
except asyncio.LimitOverrunError as error:
self._parser.feed(await self._connection.read_exactly(error.consumed))
self._parser.feed(await self._connection.read_exactly(
error.consumed))
complete_timeout -= time.time() - start_time
while len(self._content) >= chunk_size:
yield self._content[:chunk_size]
@@ -248,7 +263,12 @@ def on_message_complete(self):
except Exception as error:
print(error)

def chunk_complete(self): pass
def chunk_complete(self):
"""

:return:
"""
pass

def __repr__(self):
if 400 > self.status_code >= 300:
59 changes: 48 additions & 11 deletions vibora/client/websocket.py
@@ -5,12 +5,10 @@
from vibora.websockets import FrameParser
from .request import WebsocketRequest


# https://websocket.org/echo.html


class WebsocketProtocol(asyncio.Protocol):

def __init__(self, transport, loop):
self.loop = loop
self.transport = transport
@@ -28,7 +26,6 @@ async def on_message(self, data):


class WebsocketHandshake(asyncio.Protocol):

def __init__(self, client, loop):
self.client = client
self.loop = loop
@@ -43,33 +40,64 @@ def connection_made(self, transport):
:param transport:
:return:
"""
wr = WebsocketRequest(self.client.host, path=self.client.path, origin=self.client.origin)
transport.write(wr.encode())
websocket_response = WebsocketRequest(
self.client.host, path=self.client.path, origin=self.client.origin)
transport.write(websocket_response.encode())
self.transport = transport
print('connected')

def data_received(self, data):
"""

:param data:
:return:
"""
self.parser.feed(data)
print(f'Data received: {data}')

def connection_lost(self, exc):
"""

:param exc:
:return:
"""
print('The server closed the connection')
print('Stop the event loop')

# Parser Callbacks
def on_body(self): pass
def on_body(self):
"""

:return:
"""
pass

def on_headers_complete(self, headers, status_code):
"""

:param headers:
:param status_code:
:return:
"""
self.current_status = status_code
self.current_headers = headers

def on_message_complete(self):
self.transport.set_protocol(WebsocketProtocol(self.transport, self.loop))
"""

:return:
"""
self.transport.set_protocol(
WebsocketProtocol(self.transport, self.loop))


class WebsocketClient:

def __init__(self, host: str, port: int, path: str = '/', loop=None, origin: str = None):
def __init__(self,
host: str,
port: int,
path: str = '/',
loop=None,
origin: str = None):
self.host = host
self.port = port
self.path = path
@@ -80,12 +108,21 @@ def __init__(self, host: str, port: int, path: str = '/', loop=None, origin: str

async def connect(self):
factory = partial(WebsocketHandshake, self, self.loop)
await self.loop.create_connection(factory, host=self.host, port=self.port, ssl=True)
await self.loop.create_connection(
factory, host=self.host, port=self.port, ssl=True)

async def send(self, msg):
if not self.connected:
await self.connect()
pass

async def receive(self, max_size: int = 1 * 1024 * 1024, stream: bool = False):
async def receive(self,
max_size: int = 1 * 1024 * 1024,
stream: bool = False):
"""

:param max_size:
:param stream:
:return:
"""
pass
12 changes: 4 additions & 8 deletions vibora/limits.py
@@ -1,12 +1,9 @@


class ServerLimits:

__slots__ = ('worker_timeout', 'keep_alive_timeout', 'response_timeout', 'max_body_size',
'max_headers_size', 'write_buffer')

def __init__(self, worker_timeout: int=60, keep_alive_timeout: int=30,
max_headers_size: int=1024 * 10, write_buffer: int=419430):
def __init__(self, worker_timeout: int = 60, keep_alive_timeout: int = 30,
max_headers_size: int = 1024 * 10, write_buffer: int = 419430):
"""

:param worker_timeout:
@@ -20,11 +17,10 @@ def __init__(self, worker_timeout: int=60, keep_alive_timeout: int=30,


class RouteLimits:

__slots__ = ('timeout', 'max_body_size', 'in_memory_threshold')

def __init__(self, max_body_size: int=1*1024*1024, timeout: int=30,
in_memory_threshold: int=1*1024*1024):
def __init__(self, max_body_size: int = 1 * 1024 * 1024, timeout: int = 30,
in_memory_threshold: int = 1 * 1024 * 1024):
"""

:param max_body_size:
33 changes: 23 additions & 10 deletions vibora/multipart/containers.py
@@ -19,10 +19,16 @@ def read(self, size):


class FileUpload:
def __init__(self, name: str=None, path: str=None, content: bytes=None, iterable=None,
f=None, headers: list=None):
def __init__(self,
name: str = None,
path: str = None,
content: bytes = None,
iterable=None,
f=None,
headers: list = None):
if not any([path, content, iterable, f]):
raise Exception('You must supply either: path, content, iterable, f')
raise Exception(
'You must supply either: path, content, iterable, f')
self.name = name
if f:
self.f = f
@@ -41,9 +47,12 @@ def __init__(self, name: str=None, path: str=None, content: bytes=None, iterable


class MultipartEncoder:

def __init__(self, delimiter: bytes, params: dict, chunk_size: int=1*1024*1024,
loop=None, encoding: str='utf-8'):
def __init__(self,
delimiter: bytes,
params: dict,
chunk_size: int = 1 * 1024 * 1024,
loop=None,
encoding: str = 'utf-8'):
self.delimiter = b'--' + delimiter
self.params = params
self.chunk_size = chunk_size
@@ -59,9 +68,11 @@ def create_headers(self, name: str, value) -> bytes:
:return:
"""
if isinstance(value, FileUpload):
return f'Content-Disposition: form-data; name="{name}"; filename="{value.name}"'.encode(self.encoding)
return f'Content-Disposition: form-data; name="{name}"; filename="{value.name}"'.encode(
self.encoding)
else:
return f'Content-Disposition: form-data; name="{name}"'.encode(self.encoding)
return f'Content-Disposition: form-data; name="{name}"'.encode(
self.encoding)

def stream_value(self, value) -> bytes:
"""
@@ -72,7 +83,8 @@ def stream_value(self, value) -> bytes:
if isinstance(value, FileUpload):
while True:
if value.is_async:
chunk = self.loop.run_until_complete(value.f.read(self.chunk_size))
chunk = self.loop.run_until_complete(
value.f.read(self.chunk_size))
else:
chunk = value.f.read(self.chunk_size)
size = len(chunk)
@@ -95,7 +107,8 @@ def __iter__(self):
if self.evaluated:
raise Exception('Streaming encoder cannot be evaluated twice.')
for name, value in self.params.items():
header = self.delimiter + b'\r\n' + self.create_headers(name, value) + b'\r\n\r\n'
header = self.delimiter + b'\r\n' + self.create_headers(
name, value) + b'\r\n\r\n'
yield header
for chunk in self.stream_value(value):
yield chunk