Fix various typos across the project by kxepal · Pull Request #88 · aio-libs/aiohttp · GitHub
[go: up one dir, main page]
More Web Proxy on the site http://driver.im/
Skip to content

Fix various typos across the project #88

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Jun 29, 2014
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions aiohttp/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def request(method, url, *,
:param chunked: Boolean or Integer. Set to chunk size for chunked
transfer encoding.
:param expect100: Boolean. Expect 100-continue response from server.
:param connector: aiohttp.conntect.BaseConnector instance to support
:param connector: aiohttp.connector.BaseConnector instance to support
connection pooling and session cookies.
:param read_until_eof: Read response until eof if response
does not have Content-Length header.
Expand Down Expand Up @@ -369,7 +369,7 @@ def update_auth(self, basic_login, basic_passwd):
('%s:%s' % (basic_login, basic_passwd)).encode('latin1'))
.strip().decode('latin1'))
elif basic_login is not None or basic_passwd is not None:
raise ValueError("HTTP Auth login of password is missing")
raise ValueError("HTTP Auth login or password is missing")

def update_body_from_data(self, data):
if (hasattr(data, '__iter__') and not isinstance(
Expand Down Expand Up @@ -787,7 +787,7 @@ def encode_multipart_data(fields, boundary, encoding='utf-8', chunk_size=8196):


class HttpClient:
"""Allow to use mutiple hosts with same path. And automatically
"""Allow to use multiple hosts with same path. And automatically
mark failed hosts.
"""

Expand Down
2 changes: 1 addition & 1 deletion aiohttp/multidict.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@


class MultiDict(abc.Mapping):
"""Read-only ordered dictionary that can hava multiple values for each key.
"""Read-only ordered dictionary that can have multiple values for each key.

This type of MultiDict must be used for request headers and query args.
"""
Expand Down
4 changes: 2 additions & 2 deletions aiohttp/parsers.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
Parser receives data with generator's send() method and sends data to
destination DataQueue. Parser receives ParserBuffer and DataQueue objects
as a parameters of the parser call, all subsequent send() calls should
send bytes objects. Parser sends parsed `term` to desitnation buffer with
send bytes objects. Parser sends parsed `term` to destination buffer with
DataQueue.feed_data() method. DataQueue object should implement two methods.
feed_data() - parser uses this method to send parsed protocol data.
feed_eof() - parser uses this method for indication of end of parsing stream.
Expand Down Expand Up @@ -47,7 +47,7 @@ def set_parser(self, parser):

* Eof:

1. StreamParser recevies eof with feed_eof() call.
1. StreamParser receives eof with feed_eof() call.
2. StreamParser throws EofStream exception into parser.
3. Then it unsets parser.

Expand Down
18 changes: 9 additions & 9 deletions aiohttp/protocol.py
Original file line number Diff line number Diff line change
Expand Up @@ -339,7 +339,7 @@ def parse_length_payload(self, out, buf, length=0):
raise errors.IncompleteRead(length-required, required)

def parse_eof_payload(self, out, buf):
"""Read all bytes untile eof."""
"""Read all bytes until eof."""
try:
while True:
out.feed_data((yield from buf.readsome()))
Expand All @@ -348,7 +348,7 @@ def parse_eof_payload(self, out, buf):


class DeflateBuffer:
"""DeflateStream decomress stream and feed data into specified stream."""
"""DeflateStream decompress stream and feed data into specified stream."""

def __init__(self, out, encoding):
self.out = out
Expand Down Expand Up @@ -377,7 +377,7 @@ def feed_eof(self):
def wrap_payload_filter(func):
"""Wraps payload filter and piped filters.

Filter is a generatator that accepts arbitrary chunks of data,
Filter is a generator that accepts arbitrary chunks of data,
modify data and emit new stream of data.

For example we have stream of chunks: ['1', '2', '3', '4', '5'],
Expand All @@ -403,7 +403,7 @@ def wrap_payload_filter(func):

1. If filter receives bytes object, it should process data
and yield processed data then yield EOL_MARKER object.
2. If Filter recevied EOF_MARKER, it should yield remaining
2. If Filter received EOF_MARKER, it should yield remaining
data (buffered) and then yield EOF_MARKER.
"""
@functools.wraps(func)
Expand Down Expand Up @@ -433,7 +433,7 @@ def filter_pipe(filter, filter2):
2. Reads yielded values from the first filter until it receives
EOF_MARKER or EOL_MARKER.
3. Each of this values is being send to second filter.
4. Reads yielded values from second filter until it recives EOF_MARKER or
4. Reads yielded values from second filter until it receives EOF_MARKER or
EOL_MARKER. Each of this values yields to writer.
"""
chunk = yield
Expand Down Expand Up @@ -487,7 +487,7 @@ class HttpMessage:

Now we can use chunked writer to write stream to a network stream.
First call to write() method sends response status line and headers,
add_header() and add_headers() method unavailble at this stage:
add_header() and add_headers() method unavailable at this stage:

>> with open('...', 'rb') as f:
.. chunk = fp.read(8196)
Expand All @@ -500,7 +500,7 @@ class HttpMessage:

writer = None

# 'filter' is being used for altering write() bahaviour,
# 'filter' is being used for altering write() behaviour,
# add_chunking_filter adds deflate/gzip compression and
# add_compression_filter splits incoming data into a chunks.
filter = None
Expand Down Expand Up @@ -591,7 +591,7 @@ def add_header(self, name, value):
if name == 'USER-AGENT':
self._has_user_agent = True

# ignore hopbyhop headers
# ignore hop-by-hop headers
self.headers.add(name, value)

def add_headers(self, *headers):
Expand Down Expand Up @@ -651,7 +651,7 @@ def _add_default_headers(self):
self.headers['CONNECTION'] = connection

def write(self, chunk):
"""write() writes chunk of data to a steram by using different writers.
"""write() writes chunk of data to a stream by using different writers.
writer uses filter to modify chunk of data. write_eof() indicates
end of stream. writer can't be used after write_eof() method
being called. write() return drain future.
Expand Down
4 changes: 2 additions & 2 deletions aiohttp/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ class ServerHttpProtocol(aiohttp.StreamProtocol):

ServerHttpProtocol handles incoming http request. It reads request line,
request headers and request payload and calls handler_request() method.
By default it always returns with 404 respose.
By default it always returns with 404 response.

ServerHttpProtocol handles errors in incoming request, like bad
status line, bad headers or incomplete payload. If any error occurs,
Expand Down Expand Up @@ -147,7 +147,7 @@ def start(self):
"""Start processing of incoming requests.
It reads request line, request headers and request payload, then
calls handle_request() method. Subclass has to override
handle_request(). start() handles various excetions in request
handle_request(). start() handles various exceptions in request
or response handling. Connection is being closed always unless
keep_alive(True) specified.
"""
Expand Down
2 changes: 1 addition & 1 deletion aiohttp/test_utils.py
Original file line number	Diff line number	Diff line change
Expand Up @@ -140,7 +140,7 @@ def run(loop, fut):
# call pending connection_made if present
run_briefly(thread_loop)

# close opened trnsports
# close opened transports
for tr in transports:
tr.close()

Expand Down
2 changes: 1 addition & 1 deletion aiohttp/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@


def atoms(message, environ, response, request_time):
"""Gets atoms for log formating."""
"""Gets atoms for log formatting."""
if message:
r = '{} {} HTTP/{}.{}'.format(
message.method, message.path,
Expand Down
8 changes: 4 additions & 4 deletions examples/mpsrv.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ def heartbeat(self):
try:
msg = yield from reader.read()
except aiohttp.EofStream:
print('Superviser is dead, {} stopping...'.format(os.getpid()))
print('Supervisor is dead, {} stopping...'.format(os.getpid()))
self.loop.stop()
break

Expand Down Expand Up @@ -257,7 +257,7 @@ def kill(self):
os.kill(self.pid, signal.SIGTERM)


class Superviser:
class Supervisor:

def __init__(self, args):
self.loop = asyncio.get_event_loop()
Expand Down Expand Up @@ -286,8 +286,8 @@ def main():
args.host, port = args.host.split(':', 1)
args.port = int(port)

superviser = Superviser(args)
superviser.start()
supervisor = Supervisor(args)
supervisor.start()


if __name__ == '__main__':
Expand Down
10 changes: 5 additions & 5 deletions examples/wssrv.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def handle_request(self, message, payload):
try:
msg = yield from dataqueue.read()
except aiohttp.EofStream:
# client droped connection
# client dropped connection
break

if msg.tp == websocket.MSG_PING:
Expand Down Expand Up @@ -164,7 +164,7 @@ def heartbeat(self):
try:
msg = yield from reader.read()
except aiohttp.EofStream:
print('Superviser is dead, {} stopping...'.format(os.getpid()))
print('Supervisor is dead, {} stopping...'.format(os.getpid()))
self.loop.stop()
break

Expand Down Expand Up @@ -281,7 +281,7 @@ def kill(self):
os.kill(self.pid, signal.SIGTERM)


class Superviser:
class Supervisor:

def __init__(self, args):
self.loop = asyncio.get_event_loop()
Expand Down Expand Up @@ -310,8 +310,8 @@ def main():
args.host, port = args.host.split(':', 1)
args.port = int(port)

superviser = Superviser(args)
superviser.start()
supervisor = Supervisor(args)
supervisor.start()


if __name__ == '__main__':
Expand Down