44 changes: 1 addition & 43 deletions llm/continuous_learning_system.py
@@ -17,14 +17,14 @@
import asyncio
import json
import logging
import pickle
import time
import os
from typing import Dict, List, Any, Optional
from dataclasses import dataclass, asdict
from datetime import datetime
import numpy as np
import hashlib
import json
from pathlib import Path

# Import existing components
@@ -290,12 +290,6 @@ async def rollback_model(self, version_id: str) -> Dict[str, Any]:
version_id: Version ID to rollback to
"""
try:
<<<<<<< copilot/fix-94a3a2ef-451e-4b72-9782-aff6506fa546
# Find version in history
version_path = self.model_dir / f"{version_id}.json"

if not version_path.exists():

# Find version in history (try JSON first, then pickle for backward compatibility)
json_path = self.model_dir / f"{version_id}.json"
pkl_path = self.model_dir / f"{version_id}.pkl"
@@ -309,19 +303,11 @@ async def rollback_model(self, version_id: str) -> Dict[str, Any]:
with open(pkl_path, "rb") as f:
model_data = pickle.load(f)
else:
>>>>>>> master
return {
"success": False,
"error": f"Model version {version_id} not found",
}

<<<<<<< copilot/fix-94a3a2ef-451e-4b72-9782-aff6506fa546
# Load the version
with open(version_path, "r") as f:
model_data = json.load(f)

=======
>>>>>>> master
# Set as current model
self.current_model_version = model_data["version_info"]

@@ -597,15 +583,7 @@ async def _create_model_version(
training_data_size=self.training_stats["total_samples_processed"],
quantum_optimized=self.quantum_connector.connected,
file_path=str(self.model_dir / f"{version_id}.json"),
<<<<<<< copilot/fix-213aa9e3-0b23-4bd9-9b0c-2eb2bc585c94
checksum=hashlib.sha256(version_id.encode()).hexdigest(),

<<<<<<< copilot/fix-94a3a2ef-451e-4b72-9782-aff6506fa546
checksum=hashlib.sha256(version_id.encode()).hexdigest(),

checksum=hashlib.md5(version_id.encode()).hexdigest(),
>>>>>>> master
>>>>>>> master
)
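
Both sides of this conflict hash only the version id string and differ just in digest algorithm; the surviving context line keeps SHA-256, which is collision-resistant where MD5 is not. A tiny illustration (the id value here is hypothetical):

import hashlib

version_id = "v_20250101_000000"  # hypothetical version id
print(hashlib.sha256(version_id.encode()).hexdigest())  # 64 hex chars, kept by the merge
print(hashlib.md5(version_id.encode()).hexdigest())     # 32 hex chars, dropped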

# Save model version using custom JSON encoder
@@ -615,18 +593,8 @@
"model_state": "simulated_model_state",
}

<<<<<<< copilot/fix-213aa9e3-0b23-4bd9-9b0c-2eb2bc585c94
with open(version.file_path, "w") as f:
json.dump(model_data, f, indent=2, default=str)

<<<<<<< copilot/fix-94a3a2ef-451e-4b72-9782-aff6506fa546
with open(version.file_path, "w") as f:
json.dump(model_data, f, indent=2, default=str)

with open(version.file_path, "w", encoding="utf-8") as f:
json.dump(model_data, f, cls=ModelVersionJSONEncoder, indent=2)
>>>>>>> master
>>>>>>> master

# Update current version
self.current_model_version = version
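
ModelVersionJSONEncoder and ModelVersionJSONDecoder are referenced in the kept lines but defined outside the visible hunks. A plausible minimal shape for the encoder, assuming ModelVersion is a dataclass with datetime fields; this is a sketch, not the repository's actual class:

import json
from dataclasses import asdict, is_dataclass
from datetime import datetime

class ModelVersionJSONEncoder(json.JSONEncoder):
    """Sketch: handle the dataclass and datetime values json.dump rejects."""

    def default(self, o):
        if is_dataclass(o):
            return asdict(o)      # dataclass instances become plain dicts
        if isinstance(o, datetime):
            return o.isoformat()  # ISO 8601 keeps timestamps readable and sortable
        return super().default(o)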
@@ -667,26 +635,16 @@ async def _training_loop(self):
async def _load_or_create_model(self):
"""Load existing model or create new one"""
try:
<<<<<<< copilot/fix-94a3a2ef-451e-4b72-9782-aff6506fa546
# Look for existing model versions
model_files = list(self.model_dir.glob("*.json"))

# Look for existing model versions (first try JSON, then fallback to PKL for backward compatibility)
json_files = list(self.model_dir.glob("*.json"))
pkl_files = list(self.model_dir.glob("*.pkl"))

<<<<<<< copilot/fix-213aa9e3-0b23-4bd9-9b0c-2eb2bc585c94
with open(latest_file, "r") as f:
model_data = json.load(f)

if json_files:
# Load latest JSON version
latest_file = max(json_files, key=lambda f: f.stat().st_mtime)
>>>>>>> master

with open(latest_file, "r", encoding="utf-8") as f:
model_data = json.load(f, cls=ModelVersionJSONDecoder)
>>>>>>> master

self.current_model_version = model_data["version_info"]
logger.info(
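
Taken together, these hunks settle on JSON as the primary on-disk format, with pickle kept only as a legacy fallback. A minimal self-contained sketch of that lookup order, assuming model_dir is a pathlib.Path and that any .pkl files are trusted local artifacts (pickle.load on untrusted data is unsafe); the function names are illustrative, not from this module:

import json
import pickle
from pathlib import Path
from typing import Optional

def load_model_version(model_dir: Path, version_id: str) -> Optional[dict]:
    """Try JSON first, then fall back to pickle for backward compatibility."""
    json_path = model_dir / f"{version_id}.json"
    pkl_path = model_dir / f"{version_id}.pkl"
    if json_path.exists():
        with open(json_path, "r", encoding="utf-8") as f:
            return json.load(f)    # preferred, human-readable format
    if pkl_path.exists():
        with open(pkl_path, "rb") as f:
            return pickle.load(f)  # legacy format from older saves
    return None                    # caller reports "version not found"

def latest_json_version(model_dir: Path) -> Optional[Path]:
    """Pick the newest saved version by mtime, mirroring _load_or_create_model."""
    json_files = list(model_dir.glob("*.json"))
    return max(json_files, key=lambda p: p.stat().st_mtime) if json_files else None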
9 changes: 6 additions & 3 deletions protocols/data_processor.py
@@ -14,7 +14,8 @@ def process(self):
"""Process data files and extract insights"""
return task()

def task():

def task(data_path=None):
"""Process data files and extract insights"""
# Use provided data path or try multiple possible data directories
if data_path and os.path.exists(data_path) and os.path.isdir(data_path):
@@ -79,7 +80,8 @@ def task():
insights.append(
f"{filename}: {type(data).__name__} with {len(data) if isinstance(data, (list, dict)) else 1} items"
)
except:
except (json.JSONDecodeError, OSError):
# Skip files that can't be read or parsed
pass

elif filename.endswith(".csv"):
@@ -90,7 +92,8 @@ def task():
total_records += row_count
processed_count += 1
insights.append(f"{filename}: CSV with {row_count} rows")
except BaseException:
except (csv.Error, OSError):
# Skip CSV files that can't be read or parsed
pass

# Always return success if we got this far
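
The two handler changes above narrow a bare except: and an except BaseException: down to the errors file scanning can actually raise; the broad forms also swallow KeyboardInterrupt and SystemExit, which is rarely intended. A compact sketch of the resulting pattern (summarize_file and its arguments are illustrative, not names from this module):

import csv
import json

def summarize_file(path: str, insights: list) -> None:
    """Append a one-line summary for a JSON or CSV file, skipping bad files."""
    if path.endswith(".json"):
        try:
            with open(path, "r", encoding="utf-8") as f:
                data = json.load(f)
            count = len(data) if isinstance(data, (list, dict)) else 1
            insights.append(f"{path}: {type(data).__name__} with {count} items")
        except (json.JSONDecodeError, OSError):
            pass  # unreadable or malformed JSON: skip it, keep scanning
    elif path.endswith(".csv"):
        try:
            with open(path, "r", encoding="utf-8", newline="") as f:
                rows = sum(1 for _ in csv.reader(f))
            insights.append(f"{path}: CSV with {rows} rows")
        except (csv.Error, OSError):
            pass  # unparseable CSV: skip it instead of trapping BaseException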
3 changes: 2 additions & 1 deletion protocols/user_data_processor.py
@@ -61,7 +61,8 @@ def task():
"size": size,
}
)
except:
except OSError:
# Ignore files we cannot access or whose size cannot be determined
pass

# Generate insights
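
Same pattern here, where only OSError is plausible when measuring a file on disk. A minimal sketch (record_size is an illustrative name):

import os

def record_size(path: str, records: list) -> None:
    """Record a file's size, skipping files we cannot access."""
    try:
        records.append({"path": path, "size": os.path.getsize(path)})
    except OSError:
        pass  # permissions, races, or broken symlinks: skip the file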
3 changes: 2 additions & 1 deletion tests/test_mcp_compliance.py
@@ -267,7 +267,8 @@ def run_compliance_check():
content = py_file.read_text()
if 'TODO:' in content or 'FIXME:' in content:
placeholder_count += 1
except:
except (OSError, UnicodeDecodeError):
# Intentionally ignore files that can't be read/decoded; skip them in this quick placeholder scan
pass

if placeholder_count > 0:
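
The compliance scan keeps its skip-on-failure behavior while no longer masking unrelated errors. A sketch of the loop as the hunk leaves it, assuming py_files is an iterable of pathlib.Path objects (count_placeholders is an illustrative wrapper, not a function from the test suite):

def count_placeholders(py_files) -> int:
    """Count source files still carrying TODO:/FIXME: markers."""
    placeholder_count = 0
    for py_file in py_files:
        try:
            content = py_file.read_text()
            if 'TODO:' in content or 'FIXME:' in content:
                placeholder_count += 1
        except (OSError, UnicodeDecodeError):
            pass  # unreadable or undecodable sources are skipped by design
    return placeholder_count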