Commit
Merge pull request #2 from sean1832/dev
Adapting new protocol for Portal Networking and Packet Data Structures
Showing 12 changed files with 501 additions and 76 deletions.
.gitignore
@@ -1,3 +1,8 @@
*.zip
/bin
venv/
__pycache__/
*.pyc
test_project[s]
dist/
build/
pack.py
@@ -0,0 +1,150 @@
import ast
import os
import pathlib
import re


def merge_files(output_file, source_files):
    import_set = set()  # To keep track of unique imports
    merged_code = ""

    for fname in source_files:
        with open(fname) as infile:
            for line in infile:
                # Check if the line is an import statement
                import_match = re.match(r"^\s*(import|from)\s+(\S+)", line)
                if import_match:
                    import_statement = import_match.group(0)
                    # Skip local imports that start with a dot
                    if not re.match(r"^\s*(import|from)\s+\.", import_statement):
                        # Add to import set if it's a new import
                        if import_statement not in import_set:
                            import_set.add(import_statement)
                            merged_code += line
                else:
                    # Add non-import lines to the merged code
                    merged_code += line
        merged_code += "\n\n"  # Ensure separation between files

    # Create the output directory if it doesn't exist
    os.makedirs(os.path.dirname(output_file), exist_ok=True)
    # Write the merged code to the output file
    with open(output_file, "w") as outfile:
        outfile.write(merged_code)

    print(f"Merged files into {output_file}")


# ------------------------------------------------------------
# Post processing
# ------------------------------------------------------------


def list_top_level_functions(source_file):
    with open(source_file, "r") as file:
        tree = ast.parse(file.read(), filename=source_file)

    attach_parents(tree)  # Attach parent references before processing

    top_level_functions = []

    for node in ast.walk(tree):
        # Check for top-level function definitions
        if isinstance(node, ast.FunctionDef):
            # Ensure the function is not within a class
            if not within_class(node):
                top_level_functions.append(node)

    return top_level_functions


def within_class(node):
    """
    Check if the given node is within a class definition.
    """
    while hasattr(node, "parent"):
        node = node.parent
        if isinstance(node, ast.ClassDef):
            return True
    return False


def attach_parents(tree):
    """
    Attach parent references to each node in the AST.
    """
    for node in ast.walk(tree):
        for child in ast.iter_child_nodes(node):
            child.parent = node


def remove_duplicate_functions(source_file):
    with open(source_file, "r") as file:
        tree = ast.parse(file.read(), filename=source_file)

    attach_parents(tree)  # Attach parent references before processing

    func_names = set()
    nodes_to_remove = []

    for node in ast.walk(tree):
        if isinstance(node, ast.FunctionDef):
            if not within_class(node):
                if node.name in func_names:
                    nodes_to_remove.append(node)
                else:
                    func_names.add(node.name)

    # Remove the nodes from the source by blanking out their lines
    with open(source_file) as file:
        lines = file.readlines()
    for node in nodes_to_remove:
        start_lineno = node.lineno - 1
        end_lineno = node.end_lineno if hasattr(node, "end_lineno") else node.lineno
        for i in range(start_lineno, end_lineno):
            lines[i] = ""

    with open(source_file, "w") as file:
        file.writelines(lines)

    print(f"Removed duplicate top-level functions from {source_file}")


def read_version(source_dir):
    version_file = pathlib.Path(source_dir, "__init__.py")
    if version_file.exists():
        with version_file.open() as f:
            content = f.read()
            version_match = re.search(r"\"version\":\s*\((\d+),\s*(\d+),\s*(\d+)\)", content)
            if version_match:
                version = ".".join(version_match.groups())
                return version
    return None


if __name__ == "__main__":
    # List your Python files in the order you want them to be merged.
    source_files = [
        "portal/utils/color.py",
        "portal/data_struct/packet.py",
        "portal/handlers.py",
        "portal/server/mmap_server.py",
        "portal/server/udp_server.py",
        "portal/server/pipe_server.py",
        "portal/server/websockets_server.py",
        "portal/managers.py",
        "portal/operators.py",
        "portal/panels.py",
        "portal/__init__.py",
    ]

    # Output single script file
    version = read_version("portal")
    output_file = f"bin/portal.blender-{version}.py"

    merge_files(output_file, source_files)
    remove_duplicate_functions(output_file)

    # Re-parse the merged file; ast.parse raises SyntaxError if the
    # merged output is not valid Python.
    with open(output_file, "r") as file:
        tree = ast.parse(file.read(), filename=output_file)
    attach_parents(tree)
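
For reference, a minimal sketch of driving the packer by hand rather than through the __main__ block. This assumes it runs from the repository root (where the portal/ package lives) and that the script above is importable as pack; the two source files listed are just examples:

from pack import merge_files, read_version, remove_duplicate_functions

# read_version returns None if portal/__init__.py has no
# "version": (major, minor, patch) entry.
version = read_version("portal")
output_file = f"bin/portal.blender-{version}.py"

# Merge a subset of modules, then strip duplicated top-level functions.
merge_files(output_file, ["portal/utils/color.py", "portal/data_struct/packet.py"])
remove_duplicate_functions(output_file)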
@@ -0,0 +1 @@
python pack.py portal bin
portal/data_struct/packet.py
@@ -0,0 +1,100 @@
import struct


class PacketHeader:
    def __init__(self, is_encrypted, is_compressed, size, checksum):
        self.is_compressed = is_compressed
        self.is_encrypted = is_encrypted
        self.size = size
        self.checksum = checksum

    @property
    def IsCompressed(self):
        return self.is_compressed

    @property
    def IsEncrypted(self):
        return self.is_encrypted

    @property
    def Size(self):
        return self.size

    @property
    def Checksum(self):
        return self.checksum

    @staticmethod
    def get_expected_size():
        # flags (1 + 1) + checksum (2) + size (4) bytes
        return 8


class Packet:
    MAGIC_NUMBER = b"pk"  # 2-byte magic prefix

    def __init__(
        self, data, size=None, checksum=None, is_encrypted=None, is_compressed=None, header=None
    ):
        self.data = data
        if header is not None:
            self.header = header
        else:
            computed_size = size if size is not None else len(data)
            self.header = PacketHeader(is_encrypted, is_compressed, computed_size, checksum)

    def serialize(self):
        header_bytes = bytearray()
        header_bytes.extend(Packet.MAGIC_NUMBER)  # magic number
        header_bytes.append(1 if self.header.is_compressed else 0)  # is_compressed flag
        header_bytes.append(1 if self.header.is_encrypted else 0)  # is_encrypted flag
        header_bytes.extend(struct.pack("H", self.header.checksum))  # checksum (2 bytes)
        header_bytes.extend(struct.pack("i", self.header.size))  # size (4 bytes)
        return bytes(header_bytes) + self.data  # combine header and data

    @staticmethod
    def validate_magic_number(data):
        # minimum size of a packet is the magic number plus the header
        if len(data) < len(Packet.MAGIC_NUMBER) + PacketHeader.get_expected_size():
            raise ValueError("Data is too short to be a valid packet")

        # check magic number
        if data[: len(Packet.MAGIC_NUMBER)] != Packet.MAGIC_NUMBER:
            raise ValueError("Data does not contain the magic number")

    @staticmethod
    def deserialize(data):
        Packet.validate_magic_number(data)
        index = len(Packet.MAGIC_NUMBER)  # start after magic number
        header = Packet.deserialize_header(data, index)

        payload_data = data[
            index + PacketHeader.get_expected_size() : index
            + PacketHeader.get_expected_size()
            + header.Size
        ]
        packet = Packet(payload_data, header=header)

        return packet

    @staticmethod
    def deserialize_header(data, index):
        # read flags
        is_compressed = data[index] == 1
        index += 1
        is_encrypted = data[index] == 1
        index += 1

        # read checksum
        checksum = struct.unpack_from("H", data, index)[0]
        index += struct.calcsize("H")

        # read size
        size = struct.unpack_from("i", data, index)[0]
        index += struct.calcsize("i")

        return PacketHeader(is_encrypted, is_compressed, size, checksum)

    @staticmethod
    def deserialize_header_start(data, start_index=0):
        return Packet.deserialize_header(data, start_index)
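
As a quick round-trip check of the wire format, here is a hedged sketch; the checksum value is a placeholder, since the checksum algorithm is not defined in this file:

payload = b"hello portal"

# Build a packet; checksum 0 is purely illustrative.
packet = Packet(payload, checksum=0, is_encrypted=False, is_compressed=False)
wire = packet.serialize()

# Layout: 2-byte magic ("pk") + 8-byte header + payload.
assert wire[:2] == Packet.MAGIC_NUMBER
assert len(wire) == 2 + PacketHeader.get_expected_size() + len(payload)

# Decode and confirm the payload and header survive the round trip.
decoded = Packet.deserialize(wire)
assert decoded.data == payload
assert decoded.header.Size == len(payload)
assert not decoded.header.IsCompressed and not decoded.header.IsEncrypted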