2024-07-09 00:06:23 -04:00
|
|
|
// ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
|
|
|
|
// ┃ ██████ ██████ ██████ █ █ █ █ █ █▄ ▀███ █ ┃
|
|
|
|
|
// ┃ ▄▄▄▄▄█ █▄▄▄▄▄ ▄▄▄▄▄█ ▀▀▀▀▀█▀▀▀▀▀ █ ▀▀▀▀▀█ ████████▌▐███ ███▄ ▀█ █ ▀▀▀▀▀ ┃
|
|
|
|
|
// ┃ █▀▀▀▀▀ █▀▀▀▀▀ █▀██▀▀ ▄▄▄▄▄ █ ▄▄▄▄▄█ ▄▄▄▄▄█ ████████▌▐███ █████▄ █ ▄▄▄▄▄ ┃
|
|
|
|
|
// ┃ █ ██████ █ ▀█▄ █ ██████ █ ███▌▐███ ███████▄ █ ┃
|
|
|
|
|
// ┣━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┫
|
|
|
|
|
// ┃ Copyright (c) 2017, the Perspective Authors. ┃
|
|
|
|
|
// ┃ ╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌ ┃
|
|
|
|
|
// ┃ This file is part of the Perspective library, distributed under the terms ┃
|
|
|
|
|
// ┃ of the [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0). ┃
|
|
|
|
|
// ┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
Merge Perspective-Python and build out Node/Pybind
can construct ctx0, ctx1, ctx2
add better test, move low level tests down
fix test
clean up build/test/lint scripts
WIP: make python API more symmetric to JS API
process table data, working view constructors
edit python dockerfile
update style
more types supported
don't purge docker assets until end of test stage
don't allow table to fail, try to install packages locally
update build scripts to install deps
assert in table tests
Fixed Dockerfile
don't use target=/Volumes/files/jpmc/perspective locally
forward API to latest master
add autopep8 fix script
no -r
avoid global, remove dependency install
don't install in local folder
merge master changes
style fixes
refactoring python code to be modular, add docstring examples, fix some style issues, add support for format=1 and format=2 tables, add initial support for numpy and pandas types
seperate script for building/linting/testing all in one
install optionals
remove individual license, remap badges in readme, remove perspective-python travis, remove bettercodehub
fix test, use warning instead of warn
add codecov upload
bugfix for segfault
lint in precommit
WIP: to_dict(), add scalar_to_py
split apart pythons C++ code, don't make vectors and maps opaque
readd comented stuff, remove some more todos
support some numpy types
merge from pyapi
merge from pyapi
add scalar_to_py for outputting data
fix bug with boolean columns
working to_dict, to_columns for 0, 1, 2 sided views
refactor to_dict, to_column, add limit and index tests
update working
refactor data formatter, start on numpy export
support python-only numpy
add numpy tests
remove working
fix performance issue, make aggregates a dict
parse strings with datetime, accept string schemas, add more tests
to_records() and to_dict() replace to_dict() and to_columns()
read and write date(), datetime() objects, parse date/datetime strings
don't install before build
revamp docs
add docs framework for all packages, generate markdown for outputs including autodocs
update readmes to remove gitter, extract pandas flattening, change type mapping
convert to UTC in tests
travis is in UTC
adding pandas export
add simple to_df tests
2019-06-17 13:34:06 -04:00
|
|
|
|
2024-08-05 17:58:47 -04:00
|
|
|
import * as fs from "node:fs";
|
2025-10-25 19:51:40 -04:00
|
|
|
import sh from "../../tools/scripts/sh.mjs";
|
2024-07-08 17:27:03 -04:00
|
|
|
import * as url from "url";
|
2025-09-27 22:13:38 -04:00
|
|
|
import * as toml from "@iarna/toml";
|
2024-10-24 17:17:18 -07:00
|
|
|
import * as tar from "tar";
|
|
|
|
|
import * as path from "path";
|
2025-09-27 22:13:38 -04:00
|
|
|
import "zx/globals";
|
2024-07-08 17:27:03 -04:00
|
|
|
|
|
|
|
|
// Absolute path of this script's directory (ESM has no __dirname builtin);
// the trailing separator from `new URL(".", ...)` is stripped.
const __dirname = url.fileURLToPath(new URL(".", import.meta.url)).slice(0, -1);

// This package's parsed package.json (provides the version used below).
const pkg = JSON.parse(
    fs.readFileSync(`${__dirname}/package.json`, { encoding: "utf-8" }),
);
|
Merge Perspective-Python and build out Node/Pybind
can construct ctx0, ctx1, ctx2
add better test, move low level tests down
fix test
clean up build/test/lint scripts
WIP: make python API more symmetric to JS API
process table data, working view constructors
edit python dockerfile
update style
more types supported
don't purge docker assets until end of test stage
don't allow table to fail, try to install packages locally
update build scripts to install deps
assert in table tests
Fixed Dockerfile
don't use target=/Volumes/files/jpmc/perspective locally
forward API to latest master
add autopep8 fix script
no -r
avoid global, remove dependency install
don't install in local folder
merge master changes
style fixes
refactoring python code to be modular, add docstring examples, fix some style issues, add support for format=1 and format=2 tables, add initial support for numpy and pandas types
seperate script for building/linting/testing all in one
install optionals
remove individual license, remap badges in readme, remove perspective-python travis, remove bettercodehub
fix test, use warning instead of warn
add codecov upload
bugfix for segfault
lint in precommit
WIP: to_dict(), add scalar_to_py
split apart pythons C++ code, don't make vectors and maps opaque
readd comented stuff, remove some more todos
support some numpy types
merge from pyapi
merge from pyapi
add scalar_to_py for outputting data
fix bug with boolean columns
working to_dict, to_columns for 0, 1, 2 sided views
refactor to_dict, to_column, add limit and index tests
update working
refactor data formatter, start on numpy export
support python-only numpy
add numpy tests
remove working
fix performance issue, make aggregates a dict
parse strings with datetime, accept string schemas, add more tests
to_records() and to_dict() replace to_dict() and to_columns()
read and write date(), datetime() objects, parse date/datetime strings
don't install before build
revamp docs
add docs framework for all packages, generate markdown for outputs including autodocs
update readmes to remove gitter, extract pandas flattening, change type mapping
convert to UTC in tests
travis is in UTC
adding pandas export
add simple to_df tests
2019-06-17 13:34:06 -04:00
|
|
|
|
2024-07-09 00:06:23 -04:00
|
|
|
// Maturin/cargo profile flags: optimized release build by default.
let flags = "--release";

// Cargo feature list, filled in below depending on the build environment.
let features = [];

// Any non-empty PSP_DEBUG env var selects an unoptimized (debug) build.
if (process.env.PSP_DEBUG) {
    flags = "";
}
|
2020-09-09 14:44:38 -04:00
|
|
|
|
2024-07-08 17:27:03 -04:00
|
|
|
// Target CPython version (overridable via PSP_PYTHON_VERSION).
const python_version = process.env.PSP_PYTHON_VERSION || "3.12";

// True when building a Pyodide (wasm32-emscripten) wheel.
const is_pyodide = Boolean(process.env.PSP_PYODIDE);
|
2024-07-09 00:22:54 -04:00
|
|
|
|
2025-05-12 12:57:03 -04:00
|
|
|
// Package version, taken from package.json.
const { version } = pkg;
|
2024-08-05 17:58:47 -04:00
|
|
|
|
|
|
|
|
// Ensure the wheel's ".data" payload directory exists (it receives the
// JupyterLab extension assets packaged alongside the Python package).
const wheel_data_dir = `./perspective_python-${version}.data`;
fs.mkdirSync(wheel_data_dir, { recursive: true });

// Maturin picks up license files from the package root, so copy the
// repository-level license down into this package.
fs.copyFileSync("../../LICENSE.md", "./LICENSE.md");

// Remember the package directory and start a fresh shell-command queue.
const cwd = process.cwd();
const cmd = sh();
|
|
|
|
|
|
|
|
|
|
if (is_pyodide) {
    // Pyodide builds need the pinned Emscripten SDK activated before maturin
    // runs, so the wasm32-unknown-emscripten toolchain is on PATH.
    const emsdk_dir = sh.path`${__dirname}/../../.emsdk`;

    // The pinned emscripten version lives in the repo root package.json.
    const { emscripten } = JSON.parse(
        fs.readFileSync(sh.path`${__dirname}/../../package.json`),
    );

    cmd.sh`cd ${emsdk_dir}`
        .sh`. ./emsdk_env.sh`
        .sh`./emsdk activate ${emscripten}`
        .sh`cd ${cwd}`;
}
|
|
|
|
|
|
|
|
|
|
// On non-Windows platforms, point the native build at the repository root.
if (process.platform !== "win32") {
    cmd.env({ PSP_ROOT_DIR: "../.." });
}
|
|
|
|
|
|
|
|
|
|
// Build a wheel when explicitly requested — and always for Pyodide, which
// only ships as a wheel.
const build_wheel = !!process.env.PSP_BUILD_WHEEL || is_pyodide;
const build_sdist = !!process.env.PSP_BUILD_SDIST;

// Map (PSP_ARCH, platform) onto a maturin `--target` argument. An empty
// string lets maturin pick the host target.
let target = "";
const arch = process.env.PSP_ARCH;
if (is_pyodide) {
    // Pyodide also pins the interpreter version via `-i`.
    target = `--target=wasm32-unknown-emscripten -i${python_version}`;
} else if (process.platform === "darwin") {
    if (arch === "x86_64") {
        target = "--target=x86_64-apple-darwin";
    } else if (arch === "aarch64") {
        target = "--target=aarch64-apple-darwin";
    }
} else if (process.platform === "linux") {
    if (arch === "x86_64") {
        // manylinux_2_28 keeps the x86_64 wheel broadly installable.
        target =
            "--target=x86_64-unknown-linux-gnu --compatibility manylinux_2_28";
    } else if (arch === "aarch64") {
        target = "--target=aarch64-unknown-linux-gnu";
    }
}
|
2024-07-09 00:22:54 -04:00
|
|
|
|
2025-01-08 21:50:07 -05:00
|
|
|
if (build_wheel) {
    // Verbose cargo/maturin output on demand.
    if (process.env.PSP_BUILD_VERBOSE) {
        flags += " -vv";
    }

    if (process.env.CONDA_BUILD === "1") {
        console.log("Building with Conda flags and features");
        // Conda provides its own interpreter; forward it to maturin so the
        // wheel is built against the right Python.
        if (process.env.PYTHON) {
            console.log(`interpreter: ${process.env.PYTHON}`);
            flags += ` --interpreter=${process.env.PYTHON}`;
        } else {
            console.warn(
                "Expected PYTHON to be set in CONDA_BUILD environment, but it isn't. maturin will likely detect the wrong Python.",
            );
        }

        // we need to generate proto.rs using conda's protoc, which is set in
        // the environment. we use the unstable "versioned" python abi
        // FIX: push the feature name itself, not a nested array — the
        // original `features.push(["generate-proto"])` only produced the
        // right `--features` string by accident of Array#join stringifying
        // the inner array.
        features.push("generate-proto");
    } else {
        // standard for in-repo builds. a different set will be standard in
        // the sdist
        const standard_features = ["abi3", "generate-proto", "protobuf-src"];
        console.log("Building with standard flags and features");
        features.push(...standard_features);
    }

    cmd.sh(`maturin build ${flags} --features=${features.join(",")} ${target}`);
}
|
|
|
|
|
|
|
|
|
|
if (build_sdist) {
    // `maturin sdist` has some issues with Cargo workspaces, so we assemble
    // the sdist by hand here. Note that the resulting sdist is _not_ a Cargo
    // workspace, it is rooted in this package.
    const cargo_toml = fs.readFileSync("./Cargo.toml").toString("utf-8");
    const pyproject_toml = fs
        .readFileSync("./pyproject.toml")
        .toString("utf-8");
    const cargo = toml.parse(cargo_toml);
    const pyproject = toml.parse(pyproject_toml);

    const version = cargo["package"]["version"];
    const data_dir = `perspective_python-${version}.data`;

    // The prebuilt JupyterLab extension must already be staged under the
    // data directory; fail fast with an actionable message otherwise.
    const testfile = path.join(
        data_dir,
        "data/share/jupyter/labextensions/@perspective-dev/jupyterlab/package.json",
    );
    if (!fs.existsSync(testfile)) {
        throw new Error(
            "labextension is not present in data directory, please build `perspective-jupyterlab`",
        );
    }

    // Synthesize the PKG-INFO metadata file from the two manifests + README.
    const readme_md = fs.readFileSync("./README.md");
    const pkg_info = generatePkgInfo(pyproject, cargo, readme_md);
    fs.writeFileSync("./PKG-INFO", pkg_info);

    // Maturin finds extra license files in the root of the source directory,
    // then packages them into .dist-info in the wheel. As of Nov 2024,
    // Maturin does not yet support explicitly declaring `license-files` in
    // pyproject.toml. See https://github.com/PyO3/maturin/pull/862
    // https://github.com/PyO3/maturin/issues/861
    const crate_files = glob.sync(Array.from(cargo["package"]["include"]));
    const wheel_dir = `../target/wheels`;
    fs.mkdirSync(wheel_dir, { recursive: true });

    // Pack the crate sources, PKG-INFO, and the data dir into the sdist
    // tarball, rooted at `perspective_python-<version>/`.
    await tar.create(
        {
            gzip: true,
            file: path.join(wheel_dir, `perspective_python-${version}.tar.gz`),
            prefix: `perspective_python-${version}`,
            strict: true,
        },
        crate_files.concat(["PKG-INFO", data_dir]),
    );
}
|
|
|
|
|
|
2025-05-04 13:25:10 -04:00
|
|
|
// Opt into uv-managed virtualenvs for `maturin develop` when requested.
if (process.env.PSP_UV === "1") {
    flags += " --uv";
}
|
|
|
|
|
|
2024-07-28 21:12:49 -04:00
|
|
|
// With neither a wheel nor an sdist requested, do an editable dev install
// into the active environment instead.
if (!build_wheel && !build_sdist) {
    const dev_features = ["abi3"];
    cmd.sh(
        `maturin develop --features=${dev_features.join(",")} ${flags} ${target}`,
    );
}
|
2024-07-08 17:27:03 -04:00
|
|
|
|
2024-10-24 17:17:18 -07:00
|
|
|
// Execute the accumulated shell commands, if any were queued above.
if (!cmd.isEmpty()) {
    cmd.runSync();
}
|
2024-09-12 13:08:54 -04:00
|
|
|
|
2024-10-24 17:17:18 -07:00
|
|
|
// Generates version 2.3 according to https://packaging.python.org/en/latest/specifications/core-metadata/
// Takes parsed pyproject.toml, Cargo.toml, and contents of README.md.
function generatePkgInfo(pyproject, cargo, readme_md) {
    const project = pyproject["project"];

    // Render one "Key: value" header line, rejecting non-string values so a
    // malformed manifest fails loudly rather than emitting "undefined".
    const field = (name, value) => {
        if (typeof value !== "string") {
            throw new Error(
                `PKG-INFO value for field ${name} was not a string:\n${value}`,
            );
        }
        return `${name}: ${value}`;
    };

    const lines = [];
    const addField = (key, value) => lines.push(field(key, value));

    addField("Metadata-Version", "2.3");
    addField("Name", project.name);
    addField("Version", cargo.package.version);

    for (const c of project["classifiers"]) {
        addField("Classifier", c);
    }

    // One Requires-Dist per optional dependency, scoped to its extra...
    const extras = project["optional-dependencies"];
    for (const [extra, deps] of Object.entries(extras)) {
        for (const dep of deps) {
            addField("Requires-Dist", `${dep} ; extra == '${extra}'`);
        }
    }

    // ...followed by a Provides-Extra declaration for each extra name.
    for (const extra of Object.keys(extras)) {
        addField("Provides-Extra", extra);
    }

    addField("Summary", cargo.package.description);
    addField("Home-page", cargo.package.homepage);
    addField("Author", cargo.package.authors[0]);
    addField("Author-email", cargo.package.authors[0]);
    addField("License", cargo.package.license);
    addField("Requires-Python", project["requires-python"]);
    addField(
        "Description-Content-Type",
        "text/markdown; charset=UTF-8; variant=GFM",
    );
    addField("Project-URL", `Source Code, ${cargo.package.repository}`);

    // Body: a blank separator line, then the README contents verbatim.
    lines.push("");
    lines.push(readme_md);

    return lines.join("\n");
}
|