2 changes: 1 addition & 1 deletion docs/index.js

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion docs/index.js.map

Large diffs are not rendered by default.

74 changes: 74 additions & 0 deletions esm/interpreter/_remote_package.js
@@ -0,0 +1,74 @@
import fetch from '@webreflection/fetch';

import { toml } from '../3rd-party.js';

const { parse } = JSON;

const href = (key, pkg) => new URL(key, pkg).href;

const addPath = (target, key, value) => {
    if (key in target)
        throw new Error(`Duplicated path: ${key}`);
    target[key] = value;
};

const addPaths = (target, source, pkg) => {
    for (const key in source)
        addPath(target, href(key, pkg), source[key]);
};

const pollute = (t_js_modules, s_js_modules, name, pkg) => {
    const source = s_js_modules[name];
    if (source) {
        t_js_modules[name] ??= {};
        addPaths(t_js_modules[name], source, pkg);
    }
};

const remote = async (
    config,
    packages = config.packages,
    set = new Set(),
) => {
    const repackaged = [];
    for (const pkg of packages) {
        // avoid re-processing already processed packages
        if (set.has(pkg)) continue;
        set.add(pkg);
        const isTOML = pkg.endsWith('.toml');
        if (isTOML || pkg.endsWith('.json')) {
            const text = await fetch(pkg).text();
            const {
                name,
                files,
                js_modules,
                packages,
            } = isTOML ? await toml(text) : parse(text);

            if (set.has(name))
                throw new Error(`Unable to process ${name} @ ${pkg}`);

            set.add(name);

            if (packages) {
                // process nested packages from the remote config
                repackaged.push(...(await remote(config, packages, set)));
            }

            if (js_modules) {
                config.js_modules ??= {};
                pollute(config.js_modules, js_modules, 'main', pkg);
                pollute(config.js_modules, js_modules, 'worker', pkg);
            }

            if (files) {
                config.files ??= {};
                addPaths(config.files, files, pkg);
            }
        }
        else repackaged.push(pkg);
    }
    return repackaged;
};

export default remote;
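For orientation, here is a minimal sketch of how this helper appears to behave, based only on the code above; the descriptor URL, package name, and paths below are illustrative, not part of this PR. A `.json` (or `.toml`) entry in the packages list is fetched, its `files` and `js_modules` keys are merged into the main config with keys resolved against the descriptor's own URL, its nested `packages` list is processed recursively, and whatever is not itself a descriptor is returned as the flattened list of packages still to install.

// Hypothetical descriptor, e.g. hosted at https://example.com/widget/package.json
const descriptor = {
    name: 'widget',
    // plain names are returned untouched for the regular installer
    packages: ['numpy'],
    // keys are resolved against the descriptor URL, values are kept as-is
    js_modules: {
        main: { './widget.js': 'widget' },
    },
    files: { './data.csv': './data.csv' },
};

// With config.packages = ['https://example.com/widget/package.json'],
// remote(config) would:
//   - add https://example.com/widget/widget.js -> 'widget' to config.js_modules.main
//   - add https://example.com/widget/data.csv -> './data.csv' to config.files
//   - resolve to ['numpy'], the packages left for the interpreter to install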
6 changes: 6 additions & 0 deletions esm/interpreter/micropython.js
@@ -8,6 +8,7 @@ import mip from '../python/mip.js';
import { zip } from '../3rd-party.js';

import { initializeNativeFS } from './_nativefs.js';
+import _remote_package from './_remote_package.js';

const type = 'micropython';

@@ -44,6 +45,11 @@ export default {
        // Install Micropython Package
        this.writeFile(interpreter, './mip.py', mip);
        if (config.packages) {
+           if (config.experimental_remote_packages) {
+               progress('Loading remote packages');
+               config.packages = await _remote_package(config);
+               progress('Loaded remote packages');
+           }
            progress('Loading packages');
            await py_imports(config.packages.map(fixedRelative, baseURL));
            progress('Loaded packages');
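The resolver is opt-in: it only runs when the config sets `experimental_remote_packages`. A hedged sketch of the config shape this code path expects, where the descriptor URL and the extra package name are placeholders rather than tested values:

// illustrative only: URL and package name are placeholders
const config = {
    experimental_remote_packages: true,
    packages: [
        'https://example.com/widget/package.json', // expanded by _remote_package
        'hmac',                                     // left as-is and handed to mip
    ],
};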
62 changes: 36 additions & 26 deletions esm/interpreter/pyodide.js
@@ -2,6 +2,7 @@ import { createProgress, writeFile } from './_utils.js';
import { getFormat, loader, loadProgress, registerJSModule, run, runAsync, runEvent } from './_python.js';
import { stdio } from './_io.js';
import { IDBMapSync, isArray, fixedRelative, js_modules } from '../utils.js';
+import _remote_package from './_remote_package.js';

const type = 'pyodide';
const toJsOptions = { dict_converter: Object.fromEntries };
@@ -84,46 +85,55 @@ export default {
        // https://github.com/pyodide/pyodide/issues/5736
        const save = config.packages_cache !== 'never' && version !== '0.28.0';
        await storage.sync();
-       progress('Loaded Storage');
        // packages_cache = 'never' means: erase the whole DB
        if (!save) storage.clear();
        // otherwise check if cache is known
-       else if (packages) {
-           // packages_cache = 'passthrough' means: do not use micropip.install
-           if (config.packages_cache === 'passthrough') {
-               options.packages = packages;
-               packages = null;
-               storage.clear();
+       if (packages) {
+           if (config.experimental_remote_packages) {
+               progress('Loading remote packages');
+               config.packages = (packages = await _remote_package(config, packages));
+               progress('Loaded remote packages');
            }
-           else {
-               packages = packages.sort();
-               // packages are uniquely stored as JSON key
-               const key = stringify(packages);
-               if (storage.has(key)) {
-                   const value = storage.get(key);
+           if (save) {
+               // packages_cache = 'passthrough' means: do not use micropip.install
+               if (config.packages_cache === 'passthrough') {
+                   options.packages = packages;
+                   packages = null;
+                   storage.clear();
+               }
+               else {
+                   packages = packages.sort();
+                   // packages are uniquely stored as JSON key
+                   const key = stringify(packages);
+                   if (storage.has(key)) {
+                       const value = storage.get(key);

-                   // versions are not currently understood by pyodide when
-                   // a lockFileURL is used instead of micropip.install(packages)
-                   // https://github.com/pyodide/pyodide/issues/5135#issuecomment-2441038644
-                   // https://github.com/pyscript/pyscript/issues/2245
-                   options.packages = packages.map(name => name.split(/[>=<]=/)[0]);
+                       // versions are not currently understood by pyodide when
+                       // a lockFileURL is used instead of micropip.install(packages)
+                       // https://github.com/pyodide/pyodide/issues/5135#issuecomment-2441038644
+                       // https://github.com/pyscript/pyscript/issues/2245
+                       options.packages = packages.map(name => name.split(/[>=<]=/)[0]);

-                   if (version.startsWith('0.27')) {
-                       const blob = new Blob([value], { type: 'application/json' });
-                       options.lockFileURL = URL.createObjectURL(blob);
-                   }
-                   else {
-                       options.lockFileContents = value;
-                   }
+                       if (version.startsWith('0.27')) {
+                           const blob = new Blob([value], { type: 'application/json' });
+                           options.lockFileURL = URL.createObjectURL(blob);
+                       }
+                       else {
+                           options.lockFileContents = value;
+                       }

-                   packages = null;
+                       packages = null;
+                   }
                }
            }
        }
+       progress('Loaded Storage');
        const { stderr, stdout, get } = stdio();
+       progress('Loading interpreter');
        const interpreter = await get(
            loadPyodide({ stderr, stdout, ...options }),
        );
+       progress('Loaded interpreter');
        globalThis[js_modules].set('-T-', this.transform.bind(this, interpreter));
        if (config.debug) interpreter.setDebug(true);
        const py_imports = importPackages.bind(interpreter);
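Two details in this hunk are easy to miss: the resolved-package cache is keyed by the sorted, JSON-stringified package list, and version specifiers are stripped from package names before they are passed alongside a lock file, since pyodide does not honor them in that mode (see the linked issues). A small sketch restating just those two steps, with illustrative package names:

// restates the key derivation and version stripping above; names are illustrative
const packages = ['pandas', 'numpy>=1.26'].sort();          // ['numpy>=1.26', 'pandas']
const key = JSON.stringify(packages);                        // '["numpy>=1.26","pandas"]'
const bare = packages.map(name => name.split(/[>=<]=/)[0]);  // ['numpy', 'pandas']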
2 changes: 1 addition & 1 deletion package.json
@@ -95,6 +95,6 @@
"to-json-callback": "^0.1.1"
  },
  "worker": {
-    "blob": "sha256-1jyBlknYo68LxBQF5bk5wMmmxqX19PGLdaNSxKljS08="
+    "blob": "sha256-u4s/ss3EOnkTX4hrZ/GUovLYI9GxTeDHu+26khZiSnA="
  }
}