9 changes: 6 additions & 3 deletions compile.py
@@ -52,9 +52,12 @@ def parse_args():
def get_protos(path):
    protos = []
    for root, dirnames, filenames in os.walk(path):
-        for filename in filenames:
-            if filename.endswith('.proto'):
-                protos.append(os.path.join(root, filename))
+        protos.extend(
+            os.path.join(root, filename)
+            for filename in filenames
+            if filename.endswith('.proto')
+        )

Comment on lines -55 to +60
Function get_protos refactored with the following changes:

return protos


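A minimal standalone sketch (not part of the diff) of the pattern used in get_protos above: list.extend over a generator expression builds the same list as the nested for/if/append loop. The collect_by_suffix name and the example call are hypothetical.

import os

def collect_by_suffix(path, suffix):
    # Collect file paths under `path` whose names end with `suffix`,
    # mirroring the refactored get_protos/get_modules shape.
    found = []
    for root, _dirnames, filenames in os.walk(path):
        # One extend() call with a generator expression replaces the
        # for/if/append loop; the resulting list is identical.
        found.extend(
            os.path.join(root, filename)
            for filename in filenames
            if filename.endswith(suffix)
        )
    return found

# Hypothetical usage:
# print(collect_by_suffix('examples', '.proto'))
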
11 changes: 4 additions & 7 deletions examples/echo_asyncio/asyncio_protocol_client.py
@@ -10,7 +10,7 @@ def __init__(self):

    def connection_made(self, transport):
        sock = transport.get_extra_info('socket')
-        args.debug('Connection to {}'.format(sock))
+        args.debug(f'Connection to {sock}')
Function EchoProtocol.connection_made refactored with the following changes:

self.transport = transport

def data_received(self, data):
@@ -38,7 +38,7 @@ async def wrapper(loop, address, count):
    write = transport.write
    req = b'a' * args.msize
    receive_event = protocol.receive_event = asyncio.Event()
-    for x in range(count):
+    for _ in range(count):
Function wrapper refactored with the following changes:

write(req)
await receive_event.wait()
receive_event.clear()
@@ -52,12 +52,9 @@ async def main():
    global args
    args = parse_args(get_client_parser())
    loop = asyncio.get_running_loop()
-    tasks = []
-    for x in range(args.concurrency):
-        tasks.append(asyncio.create_task(
+    tasks = [asyncio.create_task(
        wrapper(loop, args.address, args.request)
-    ))
-
+    ) for _ in range(args.concurrency)]
Comment on lines -55 to +57
Function main refactored with the following changes:

await asyncio.gather(*tasks)


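The main() refactor above builds the task list with a comprehension and then awaits it with asyncio.gather. A minimal self-contained sketch of that pattern (echo_once is a hypothetical stand-in for wrapper):

import asyncio

async def echo_once(i):
    # Stand-in for wrapper(loop, address, count): pretend to do one request.
    await asyncio.sleep(0)
    return i

async def main(concurrency=4):
    # Create all tasks up front with a comprehension, then wait for them together.
    tasks = [asyncio.create_task(echo_once(i)) for i in range(concurrency)]
    print(await asyncio.gather(*tasks))

if __name__ == '__main__':
    asyncio.run(main())

The pymaid-based clients below use the same shape, with pymaid.create_task and pymaid.gather in place of the asyncio calls.
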
2 changes: 1 addition & 1 deletion examples/echo_asyncio/asyncio_protocol_server.py
@@ -7,7 +7,7 @@ class EchoProtocol(asyncio.Protocol):

    def connection_made(self, transport):
        sock = transport.get_extra_info('socket')
-        args.debug('Connection from {}'.format(sock))
+        args.debug(f'Connection from {sock}')
Function EchoProtocol.connection_made refactored with the following changes:

self.transport = transport

def data_received(self, data):
8 changes: 4 additions & 4 deletions examples/echo_asyncio/asyncio_stream_client.py
@@ -14,7 +14,6 @@ async def wrapper(address, count):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    except (OSError, NameError):
        args.debug('set nodelay failed')
-        pass
Function wrapper refactored with the following changes:

args.debug(f'[conn][reader|{reader}][writer|{writer}] connected')

req = b'a' * args.msize
@@ -35,9 +34,10 @@ async def wrapper(address, count):
async def main():
    global args
    args = parse_args(get_client_parser())
-    tasks = []
-    for x in range(args.concurrency):
-        tasks.append(asyncio.create_task(wrapper(args.address, args.request)))
+    tasks = [
+        asyncio.create_task(wrapper(args.address, args.request))
+        for _ in range(args.concurrency)
+    ]
Comment on lines -38 to +40
Function main refactored with the following changes:


await asyncio.gather(*tasks)

1 change: 0 additions & 1 deletion examples/echo_asyncio/asyncio_stream_server.py
@@ -11,7 +11,6 @@ async def handler(reader, writer):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    except (OSError, NameError) as ex:
        args.debug(f'set nodelay failed {ex}')
-        pass
Function handler refactored with the following changes:

read, write = reader.read, writer.write
while 1:
data = await read(256 * 1024)
9 changes: 3 additions & 6 deletions examples/echo_pymaid/datagram_client.py
@@ -29,7 +29,7 @@ async def wrapper(loop, address, count):
    write = transport.sendto
    req = b'a' * args.msize
    receive_event = protocol.receive_event = pymaid.Event()
-    for x in range(count):
+    for _ in range(count):
Function wrapper refactored with the following changes:

write(req)
await receive_event.wait()
receive_event.clear()
@@ -42,12 +42,9 @@ async def main():
    global args
    args = parse_args(get_client_parser())
    loop = pymaid.get_event_loop()
-    tasks = []
-    for x in range(args.concurrency):
-        tasks.append(pymaid.create_task(
+    tasks = [pymaid.create_task(
        wrapper(loop, args.address, args.request)
-    ))
-
+    ) for _ in range(args.concurrency)]
Comment on lines -45 to +47
Function main refactored with the following changes:

await pymaid.gather(*tasks)


9 changes: 4 additions & 5 deletions examples/echo_ws/client.py
@@ -29,11 +29,10 @@ async def wrapper(address, count, msize):

async def main():
    args = parse_args(get_client_parser())
-    tasks = []
-    for x in range(args.concurrency):
-        tasks.append(
-            pymaid.create_task(wrapper(args.address, args.request, args.msize))
-        )
+    tasks = [
+        pymaid.create_task(wrapper(args.address, args.request, args.msize))
+        for _ in range(args.concurrency)
+    ]
Comment on lines -32 to +35
Function main refactored with the following changes:


# await pymaid.wait(tasks, timeout=args.timeout)
await pymaid.gather(*tasks)
7 changes: 4 additions & 3 deletions examples/heartbeat/client.py
@@ -11,9 +11,10 @@ async def wrapper(address):

async def main():
    args = parse_args(get_client_parser())
-    tasks = []
-    for x in range(args.concurrency):
-        tasks.append(pymaid.create_task(wrapper(args.address)))
+    tasks = [
+        pymaid.create_task(wrapper(args.address))
+        for _ in range(args.concurrency)
+    ]
Comment on lines -14 to +17
Function main refactored with the following changes:


# await pymaid.wait(tasks, timeout=args.timeout)
await pymaid.gather(*tasks)
7 changes: 4 additions & 3 deletions examples/net/client.py
@@ -25,9 +25,10 @@ async def wrapper(address, count):

async def main():
    args = parse_args(get_client_parser())
-    tasks = []
-    for x in range(args.concurrency):
-        tasks.append(pymaid.create_task(wrapper(args.address, args.request)))
+    tasks = [
+        pymaid.create_task(wrapper(args.address, args.request))
+        for _ in range(args.concurrency)
+    ]
Comment on lines -28 to +31
Function main refactored with the following changes:


# await pymaid.wait(tasks, timeout=args.timeout)
await pymaid.gather(*tasks)
7 changes: 4 additions & 3 deletions examples/pb/client.py
@@ -10,11 +10,12 @@
async def main():
    args = parse_args(get_client_parser())
    service = pymaid.rpc.pb.router.PBRouterStub(EchoService_Stub)
-    tasks = []
    address = args.address
    request = args.request
-    for x in range(args.concurrency):
-        tasks.append(pymaid.create_task(worker(address, service, request)))
+    tasks = [
+        pymaid.create_task(worker(address, service, request))
+        for _ in range(args.concurrency)
+    ]
Comment on lines -13 to +18
Function main refactored with the following changes:


# await pymaid.wait(tasks, timeout=args.timeout)
await pymaid.gather(*tasks)
20 changes: 1 addition & 19 deletions examples/pb/stub.py
@@ -13,7 +13,7 @@ async def get_requests():
async def worker(address, service, count, **kwargs):
    conn = await pymaid.rpc.pb.dial_stream(address, **kwargs)

-    for x in range(count):
+    for _ in range(count):
Function worker refactored with the following changes:

This removes the following comments ( why? ):

#     async for req in get_requests():
#     # or let context handle this at cleanup for you
#     # or you can send requests first, then wait for responses
# # This block performs the same STREAM_STREAM interaction as above
#         resp = await context.recv_message()
#         await context.send_message(request)
#     # you can send end message yourself
# async with service.StreamStreamEcho.open(conn=conn) as context:
#         assert len(resp.message) == 8000
#         # you can still do something here
# # while showing more advanced stream control features.
#     async for resp in context:
#     await context.send_message(end=True)

# UnaryUnaryEcho
resp = await service.UnaryUnaryEcho(request, conn=conn)
assert len(resp.message) == 8000
@@ -55,24 +55,6 @@ async def worker(address, service, count, **kwargs):
    async for resp in service.StreamStreamEcho(get_requests(), conn=conn):
        assert len(resp.message) == 8000

-    # # This block performs the same STREAM_STREAM interaction as above
-    # # while showing more advanced stream control features.
-    # async with service.StreamStreamEcho.open(conn=conn) as context:
-    #     async for req in get_requests():
-    #         await context.send_message(request)
-    #         # you can still do something here
-    #         resp = await context.recv_message()
-    #         assert len(resp.message) == 8000
-    #     # or you can send requests first, then wait for responses
-    #     async for req in get_requests():
-    #         await context.send_message(request)
-    #     # you can still do something here
-    #     async for resp in context:
-    #         # you can still do something here
-    #         assert len(resp.message) == 8000
-    #     # you can send end message yourself
-    #     # or let context handle this at cleanup for you
-    #     await context.send_message(end=True)
    conn.shutdown()
    conn.close()
    await conn.wait_closed()
9 changes: 2 additions & 7 deletions examples/pb/ws_client.py
@@ -13,21 +13,16 @@
async def main():
    args = parse_args(get_client_parser())
    service = pymaid.rpc.pb.router.PBRouterStub(EchoService_Stub)
-    tasks = []
    address = args.address
    request = args.request
-    for x in range(args.concurrency):
-        tasks.append(
-            pymaid.create_task(
+    tasks = [pymaid.create_task(
        worker(
            address,
            service,
            request,
            transport_class=WebSocket | Connection,
        )
-            )
-        )
-
+    ) for _ in range(args.concurrency)]
Comment on lines -16 to +25
Function main refactored with the following changes:

# await pymaid.wait(tasks, timeout=args.timeout)
await pymaid.gather(*tasks)

21 changes: 12 additions & 9 deletions plugin/js/jsimpl.py
@@ -89,9 +89,12 @@ def parse_args():
def get_modules(root_path):
    modules = []
    for root, dirnames, filenames in os.walk(root_path):
-        for filename in filenames:
-            if filename.endswith('_pb2.py'):
-                modules.append(os.path.join(root, filename))
+        modules.extend(
+            os.path.join(root, filename)
+            for filename in filenames
+            if filename.endswith('_pb2.py')
+        )

Comment on lines -92 to +97
Function get_modules refactored with the following changes:

print('modules', modules)
return modules

@@ -116,7 +119,7 @@ def extra_message(message, indent=' '):
        text = f'{indent}{field.name}: {LABELS[field.label]} '
        if field.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
            fields.append(text + field.message_type.name)
-            fields.extend(extra_message(field.message_type, indent + ' '))
+            fields.extend(extra_message(field.message_type, f'{indent} '))
Comment on lines -119 to +122
Function extra_message refactored with the following changes:

else:
fields.append(text + TYPES[field.type])
# print (fields)
@@ -130,12 +133,12 @@ def generate_jsimpl(service_descriptor, package, prefix):
    service_name = service_descriptor.name
    print(f'generating {service_descriptor.full_name}')
    for method in service_descriptor.methods:
-        req = star_indent + 'req: ' + method.input_type.name + star_indent
+        req = f'{star_indent}req: {method.input_type.name}{star_indent}'
        req += star_indent.join(extra_message(method.input_type))
-        resp = star_indent + 'resp: ' + method.output_type.name + star_indent
+        resp = f'{star_indent}resp: {method.output_type.name}{star_indent}'
        resp += star_indent.join(extra_message(method.output_type))
-        input_type = prefix + '.' + method.input_type.full_name
-        output_type = prefix + '.' + method.output_type.full_name
+        input_type = f'{prefix}.{method.input_type.full_name}'
+        output_type = f'{prefix}.{method.output_type.full_name}'
Comment on lines -133 to +141
Function generate_jsimpl refactored with the following changes:

requires.update([REQUIRE_TEMPLATE.safe_substitute(name=input_type),
REQUIRE_TEMPLATE.safe_substitute(name=output_type)])
in_out_types.extend([
@@ -175,7 +178,7 @@ def generate(path, output, package, prefix, root):
        if not os.path.exists(output_path):
            os.makedirs(output_path)
        file_path = os.path.join(output_path, splits[-1][:-7])
-        with open(file_path + '_broadcast.js', 'w') as fp:
+        with open(f'{file_path}_broadcast.js', 'w') as fp:
Comment on lines -178 to +181
Function generate refactored with the following changes:

fp.write(content)


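The string-building changes in jsimpl.py above (and in jsrpc.py below) replace '+' concatenation with f-strings; the resulting strings are identical. A tiny sketch with made-up values:

prefix = 'proto'  # hypothetical values, standing in for the generator's data
full_name = 'echo.EchoService.UnaryUnaryEcho'
file_path = 'output/echo'

# Concatenation and f-string forms produce the same strings.
assert prefix + '.' + full_name == f'{prefix}.{full_name}'
assert file_path + '_broadcast.js' == f'{file_path}_broadcast.js'
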
15 changes: 9 additions & 6 deletions plugin/js/jsrpc.py
@@ -56,9 +56,12 @@ def parse_args():
def get_modules(root_path):
    modules = []
    for root, dirnames, filenames in os.walk(root_path):
-        for filename in filenames:
-            if filename.endswith('_pb2.py'):
-                modules.append(os.path.join(root, filename))
+        modules.extend(
+            os.path.join(root, filename)
+            for filename in filenames
+            if filename.endswith('_pb2.py')
+        )

Comment on lines -59 to +64
Function get_modules refactored with the following changes:

print('modules', modules)
return modules

@@ -83,8 +86,8 @@ def generate_js_rpc(service_descriptor, package, prefix):
    service_name = service_descriptor.name
    print('generating %s' % service_descriptor.full_name)
    for method in service_descriptor.methods:
-        input_type = prefix + '.' + method.input_type.full_name
-        output_type = prefix + '.' + method.output_type.full_name
+        input_type = f'{prefix}.{method.input_type.full_name}'
+        output_type = f'{prefix}.{method.output_type.full_name}'
Comment on lines -86 to +90
Function generate_js_rpc refactored with the following changes:

requires.update([REQUIRE_TEMPLATE.safe_substitute(name=input_type),
REQUIRE_TEMPLATE.safe_substitute(name=output_type)])
mstr = METHOD_TEMPLATE.safe_substitute(
@@ -112,7 +115,7 @@ def generate(path, output, package, prefix, root):
        if not os.path.exists(output_path):
            os.makedirs(output_path)
        file_path = os.path.join(output_path, splits[-1][:-7])
-        with open(file_path + '_rpc.js', 'w') as fp:
+        with open(f'{file_path}_rpc.js', 'w') as fp:
Comment on lines -115 to +118
Function generate refactored with the following changes:

fp.write(content)


3 changes: 1 addition & 2 deletions pymaid/cli/parser.py
@@ -41,8 +41,7 @@ def on_parse_callback(self, args):
        if self.on_parse:
            self.on_parse(args)
        if self.subparsers:
-            subcmd = self.get_subcmd(args)
-            if subcmd:
+            if subcmd := self.get_subcmd(args):
Function ArgumentParser.on_parse_callback refactored with the following changes:

sub_parser = self.subparsers._name_parser_map[subcmd]
sub_parser.on_parse_callback(args)

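The on_parse_callback change above (and the ApolloBackend.run change in pymaid/conf/backend.py below) folds an assignment and a truthiness check into one assignment expression, which requires Python 3.8+. A minimal sketch of the equivalence, using a hypothetical lookup function:

def get_subcmd(args):
    # Hypothetical lookup: return a subcommand name or None.
    return args.get('subcmd')

args = {'subcmd': 'serve'}

# Before: assign, then test.
subcmd = get_subcmd(args)
if subcmd:
    print('dispatch to', subcmd)

# After: the walrus operator binds and tests in one expression (Python 3.8+).
if subcmd := get_subcmd(args):
    print('dispatch to', subcmd)
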
3 changes: 1 addition & 2 deletions pymaid/conf/backend.py
@@ -137,8 +137,7 @@ async def run(self):
            for item in update:
                ns = item['namespaceName'].rsplit('.', 1)[0]
                nid = item['notificationId']
-                data = get_data(ns, self.subscriptions[ns]['format'])
-                if data:
+                if data := get_data(ns, self.subscriptions[ns]['format']):
Function ApolloBackend.run refactored with the following changes:

self.subscriptions[ns]['notificationId'] = nid
delta[ns] = data
