Skip to content

authoring: add -q option #1944

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 19 additions & 9 deletions tools/authoring/authoring.cc
Original file line number Diff line number Diff line change
Expand Up @@ -96,281 +96,291 @@
const auto inputs = args.positional();
const auto license = args.get<std::string>("license");
const bool asksForHelp = !!args.get<bool>("h");
const bool quiet = !!args.get<bool>("q");
const bool hasOutput = output.has_value();
const bool hasExactlyOneInput = inputs.size() == 1;

if (asksForHelp || !hasExactlyOneInput || !hasOutput) {
fmt::print(R"(
Usage: {} input.json [-h] -o output.bin
input.json mandatory: specify the input JSON file.
-o output.bin mandatory: name of the output file.
-basedir path optional: base directory for the input files.
-license file optional: use this license file.
-threads count optional: number of threads to use for compression.
-q optional: only print errors.
-h displays this help information and exit.
)",
argv[0]);
return -1;
}

auto input = inputs[0];
const std::filesystem::path basePath =
args.get<std::string>("basedir", std::filesystem::path(input).parent_path().string());
PCSX::IO<PCSX::File> indexFile(new PCSX::PosixFile(input));
if (indexFile->failed()) {
fmt::print("Unable to open file: {}\n", input);
return -1;
}
PCSX::FileAsContainer container(indexFile);
auto indexData = nlohmann::json::parse(container.begin(), container.end(), nullptr, false, true);
if (indexData.is_discarded()) {
fmt::print("Unable to parse JSON file: {}\n", input);
return -1;
}
if (indexData.is_null()) {
fmt::print("Unable to parse JSON file: {}\n", input);
return -1;
}

if (!indexData.is_object()) {
fmt::print("Invalid JSON file: {}\n", input);
return -1;
}

if (!indexData.contains("executable") || !indexData["executable"].is_string()) {
fmt::print("Invalid JSON file: {}\n", input);
return -1;
}

if (!indexData.contains("files") || !indexData["files"].is_array()) {
fmt::print("Invalid JSON file: {}\n", input);
return -1;
}

PCSX::IO<PCSX::File> out(new PCSX::PosixFile(output.value(), PCSX::FileOps::TRUNCATE));
if (out->failed()) {
fmt::print("Error opening output file {}\n", output.value());
return -1;
}
PCSX::ISO9660Builder builder(out);

PCSX::IO<PCSX::File> licenseFile(new PCSX::FailedFile);
if (license.has_value()) {
licenseFile.setFile(new PCSX::PosixFile(license.value()));
if (licenseFile->failed()) {
fmt::print("Error opening license file {}\n", license.value());
return -1;
}
}

const unsigned threadCount = args.get<unsigned>("threads", std::thread::hardware_concurrency());

nlohmann::json pvdData = nlohmann::json::object();
if (indexData.contains("pvd") && indexData["pvd"].is_object()) {
pvdData = indexData["pvd"];
}

auto executablePath = indexData["executable"].get<std::string>();
PCSX::IO<PCSX::File> executableFile(new PCSX::PosixFile(basePath / executablePath));
if (executableFile->failed()) {
fmt::print("Unable to open file: {}\n", executablePath);
return -1;
}

builder.writeLicense(licenseFile);

PCSX::BinaryLoader::Info info;
PCSX::IO<PCSX::Mem4G> memory(new PCSX::Mem4G());
std::map<uint32_t, std::string> symbols;
bool success = PCSX::BinaryLoader::load(executableFile, memory, info, symbols);
if (!success) {
fmt::print("Unable to load file: {}\n", executablePath);
return -1;
}
if (!info.pc.has_value()) {
fmt::print("File {} is invalid.\n", executablePath);
return -1;
}

const unsigned filesCount = indexData["files"].size();
const unsigned indexSectorsCount = ((filesCount + 1) * sizeof(IndexEntry) + 2047) / 2048;

if (filesCount > c_maximumSectorCount) {
fmt::print("Too many files specified ({}), max allowed is {}\n", filesCount, c_maximumSectorCount);
return -1;
}
fmt::print("Index size: {}\n", indexSectorsCount * 2048);
if (!quiet) {
fmt::print("Index size: {}\n", indexSectorsCount * 2048);
}

PCSX::PS1Packer::Options options;
options.booty = false;
options.raw = false;
options.rom = false;
options.cpe = false;
options.shell = false;
options.nokernel = true;
options.tload = false;
options.nopad = false;
PCSX::IO<PCSX::File> compressedExecutable(new PCSX::BufferFile(PCSX::FileOps::READWRITE));
PCSX::PS1Packer::pack(new PCSX::SubFile(memory, memory->lowestAddress(), memory->actualSize()),
compressedExecutable, memory->lowestAddress(), info.pc.value_or(0), info.gp.value_or(0),
info.sp.value_or(0), options);

if (compressedExecutable->size() % 2048 != 0) {
fmt::print("Executable size is not a multiple of 2048\n");
return -1;
}
fmt::print("Executable size: {}\n", compressedExecutable->size());
fmt::print("Executable location: {}\n", 23 + indexSectorsCount);
if (!quiet) {
fmt::print("Executable size: {}\n", compressedExecutable->size());
fmt::print("Executable location: {}\n", 23 + indexSectorsCount);
}

const unsigned executableSectorsCount = compressedExecutable->size() / 2048;
unsigned currentSector = 23 + indexSectorsCount;

for (unsigned i = 0; i < executableSectorsCount; i++) {
auto sector = compressedExecutable.asA<PCSX::BufferFile>()->borrow(i * 2048);
builder.writeSectorAt(sector.data<uint8_t>(), PCSX::IEC60908b::MSF{150 + currentSector++},
PCSX::IEC60908b::SectorMode::M2_FORM1);
}

std::unique_ptr<uint8_t[]> indexEntryDataBuffer(new uint8_t[indexSectorsCount * 2048]);
memset(indexEntryDataBuffer.get(), 0, indexSectorsCount * 2048);
std::span<IndexEntry> indexEntryData = {reinterpret_cast<IndexEntry*>(indexEntryDataBuffer.get()) + 1, filesCount};

// One per-file job unit shared between the compressor worker threads and
// the main writer loop. A worker fills sectorData (or sets failed) and
// then releases the semaphore; the writer acquires it before consuming
// the results.
struct WorkUnit {
// Semaphore starts at 0 so the writer blocks until a worker releases it.
WorkUnit() : semaphore(0), failed(false) {}
// Released exactly once per unit by a worker, on success or on failure.
std::binary_semaphore semaphore;
// Fully formed 2352-byte raw sectors, ready to be written to the image.
std::vector<uint8_t> sectorData;
// JSON descriptor for this file; "path" is required, "name" is optional.
nlohmann::json fileInfo;
// Set by the worker instead of aborting; checked by the writer thread.
bool failed;
};
static WorkUnit work[c_maximumSectorCount];
for (unsigned i = 0; i < filesCount; i++) {
auto& fileInfo = indexData["files"][i];
if (!fileInfo.is_object()) {
fmt::print("Invalid JSON file: {}\n", input);
return -1;
}
if (!fileInfo.contains("path") || !fileInfo["path"].is_string()) {
fmt::print("Invalid JSON file: {}\n", input);
return -1;
}
work[i].fileInfo = fileInfo;
}
// Fill in the constant part of a 2352-byte raw sector header: the sync
// field (ten 0xff bytes at offsets 1..10, with bytes 0 and 11 left 0),
// the mode byte (2), and the 0x08 flag in both subheader copies
// (offsets 18 and 22). The MSF address bytes (12..14) are stamped later.
auto createSectorHeader = [](uint8_t raw[2352]) {
memset(raw + 1, 0xff, 10);  // sync pattern; raw[0] and raw[11] stay 0
raw[15] = 2;                // sector mode byte: mode 2
raw[18] = 8;                // subheader submode flag (first copy)
raw[22] = 8;                // subheader submode flag (redundant copy)
};

std::atomic<unsigned> currentWorkUnit = 0;
for (unsigned i = 0; i < threadCount; i++) {
std::thread t([&]() {
while (1) {
std::atomic_thread_fence(std::memory_order_acq_rel);
unsigned workUnitIndex = currentWorkUnit.fetch_add(1);
if (workUnitIndex >= filesCount) return;
auto& workUnit = work[workUnitIndex];
auto filePath = workUnit.fileInfo["path"].get<std::string>();
PCSX::IO<PCSX::File> file(new PCSX::PosixFile(basePath / filePath));
if (file->failed()) {
workUnit.failed = true;
workUnit.semaphore.release();
continue;
}
unsigned size = file->size();
if (size >= 2 * 1024 * 1024) {
workUnit.failed = true;
workUnit.semaphore.release();
continue;
}
unsigned originalSectorsCount = (size + 2047) / 2048;
std::vector<uint8_t> dataIn;
dataIn.resize(originalSectorsCount * 2048);
file->read(dataIn.data(), dataIn.size());

std::vector<uint8_t> dataOut;
dataOut.resize(dataIn.size() * 1.2 + 2064 + 2048);
ucl_uint outSize;
int r;

r = ucl_nrv2e_99_compress(dataIn.data(), size, dataOut.data() + 2048, &outSize, nullptr, 10, nullptr,
nullptr);
if (r != UCL_E_OK) {
workUnit.failed = true;
workUnit.semaphore.release();
continue;
}

unsigned compressedSectorsCount = (outSize + 2047) / 2048;

IndexEntry* entry = &indexEntryData[workUnitIndex];

if (workUnit.fileInfo["name"].is_string()) {
entry->hash = PCSX::djb::hash(workUnit.fileInfo["name"].get<std::string>());
} else {
entry->hash = PCSX::djb::hash(filePath);
}
entry->setDecompSize(size);
std::span<uint8_t> source;
unsigned sectorCount = 0;
if (compressedSectorsCount < originalSectorsCount) {
entry->setCompressedSize(compressedSectorsCount);
entry->setMethod(IndexEntry::Method::UCL_NRV2E);
unsigned padding = outSize % 2048;
if (padding > 0) {
padding = 2048 - padding;
}
entry->setPadding(padding);
sectorCount = compressedSectorsCount;
source = {reinterpret_cast<uint8_t*>(dataOut.data()) - padding + 2048, sectorCount * 2048};
} else {
entry->setCompressedSize(originalSectorsCount);
entry->setMethod(IndexEntry::Method::NONE);
entry->setPadding(0);
sectorCount = originalSectorsCount;
source = {reinterpret_cast<uint8_t*>(dataIn.data()), sectorCount * 2048};
}
workUnit.sectorData.resize(sectorCount * 2352);
for (unsigned sector = 0; sector < sectorCount; sector++) {
uint8_t* dest = workUnit.sectorData.data() + sector * 2352;
createSectorHeader(dest);
memcpy(dest + 24, source.data() + sector * 2048, 2048);
PCSX::IEC60908b::computeEDCECC(dest);
}
workUnit.semaphore.release();
}
});
t.detach();
}

auto putSectorLBA = [](uint8_t sector[2352], uint32_t lba) {
PCSX::IEC60908b::MSF time(lba + 150);
time.toBCD(sector + 12);
};

for (unsigned workUnitIndex = 0; workUnitIndex < filesCount; workUnitIndex++) {
auto& workUnit = work[workUnitIndex];
workUnit.semaphore.acquire();
std::atomic_thread_fence(std::memory_order_acq_rel);
if (workUnit.failed) {
fmt::print("Error processing file: {}\n", workUnit.fileInfo["path"].get<std::string>());
return -1;
}
IndexEntry* entry = &indexEntryData[workUnitIndex];
fmt::print("Processed file: {}\n", workUnit.fileInfo["path"].get<std::string>());
fmt::print(" Original size: {}\n", entry->getDecompSize());
fmt::print(" Compressed size: {}\n", entry->getCompressedSize() * 2048);
fmt::print(" Compression method: {}\n", static_cast<uint32_t>(entry->getCompressionMethod()));
fmt::print(" Sector offset: {}\n", currentSector);
if (!quiet) {
fmt::print("Processed file: {}\n", workUnit.fileInfo["path"].get<std::string>());
fmt::print(" Original size: {}\n", entry->getDecompSize());
fmt::print(" Compressed size: {}\n", entry->getCompressedSize() * 2048);
fmt::print(" Compression method: {}\n", static_cast<uint32_t>(entry->getCompressionMethod()));
fmt::print(" Sector offset: {}\n", currentSector);
}
entry->setSectorOffset(currentSector);
unsigned sectorCount = entry->getCompressedSize();
for (unsigned sector = 0; sector < sectorCount; sector++) {
uint8_t* dest = workUnit.sectorData.data() + sector * 2352;
putSectorLBA(dest, currentSector);
builder.writeSectorAt(dest, PCSX::IEC60908b::MSF{150 + currentSector++}, PCSX::IEC60908b::SectorMode::RAW);
}
}

fmt::print("Processed {} files.\n", filesCount);
if (!quiet) {
fmt::print("Processed {} files.\n", filesCount);
}

Check warning on line 383 in tools/authoring/authoring.cc

View check run for this annotation

CodeScene Delta Analysis / CodeScene Cloud Delta Analysis (main)

❌ Getting worse: Complex Method

main increases in cyclomatic complexity from 55 to 59, threshold = 9. This function has many conditional statements (e.g. if, for, while), leading to lower code health. Avoid adding more conditionals and code to it without refactoring.

uint8_t empty[2048] = {0};
for (unsigned i = 0; i < 150; i++) {
Expand Down
Loading