Compare commits

No commits in common. "6af48cec0b0dca98a667b5a2585d6b7aa7cc94e3" and "f0381076a4c7dec6a85787855cde06593ab67d9b" have entirely different histories.

17 changed files with 20096 additions and 24776 deletions
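
For reference, a comparison like this can be reproduced locally with plain git once both commits are present in the clone: git diff compares the two trees directly, so it works even though the commits share no merge base (a sketch, not taken from this page):

git diff --stat 6af48cec0b0dca98a667b5a2585d6b7aa7cc94e3 f0381076a4c7dec6a85787855cde06593ab67d9b
# Drop --stat to see the full patch rather than the per-file summary.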

File diff suppressed because it is too large.

@@ -1,153 +1,87 @@
[project]
name = "baserow"
authors = [{ name = "Bram Wiepjes (Baserow)", email = "bram@baserow.io" }]
#summary = "Baserow: open source no-code database backend."
description = """Baserow is an open source no-code database tool and Airtable \
alternative. Easily create a relational database without any \
technical expertise. Build a table and define custom fields \
like text, number, file and many more."""
# mixed license actually
license = { file = "LICENSE" }
requires-python = ">=3.11"
dynamic = ["version", "dependencies"]
classifiers = []
[project.urls]
Homepage = "https://baserow.io"
"Bug Tracker" = "https://gitlab.com/baserow/baserow/-/issues/"
Documentation = "https://baserow.io/user-docs"
Support = "https://community.baserow.io/"
Changelog = "https://gitlab.com/baserow/baserow/-/issues/"
[project.scripts]
baserow = "baserow.manage:main"
[tool.black]
exclude = """
exclude = '''
/(
| migrations
| generated
)/
"""
# ensure consistent formatting for black
# see https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#required-version
# see https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#target-version
target-version = ["py311"]
required-version = "23"
[tool.setuptools]
include-package-data = true
# https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html
platforms = ["Linux"]
#[tool.setuptools.package-data]
#baserow.data.templates = ["*"]
[tool.setuptools.packages.find]
namespaces = true
where = ["src/"]
include = ["baserow", "baserow.*"]
[tool.setuptools.dynamic]
# version always from the code
version = { attr = "baserow.version.VERSION" }
dependencies = { file = ["deps/base.txt"] }
'''
[tool.poetry]
name = "baserow"
version = "1.29.1"
version = "1.15.1"
description = "Baserow: open source no-code database backend."
authors = ["Bram Wiepjes (Baserow) <bram@baserow.io>"]
description = """Baserow is an open source no-code database tool and Airtable \
alternative. Easily create a relational database without any \
technical expertise. Build a table and define custom fields \
like text, number, file and many more."""
readme = "README.md"
packages = [
{ include = "baserow", from = "src" },
]
[tool.poetry.dependencies]
python = ">=3.11,<4.0"
django = "^5.0.9"
django-cors-headers = "^4.3.1"
djangorestframework = "^3.15.1"
djangorestframework-simplejwt = "^5.3.1"
psycopg2 = "^2.9.9"
faker = "^25.0.1"
twisted = "^24.3.0"
gunicorn = "^22.0.0"
websockets = "^12.0"
requests = "^2.31.0"
itsdangerous = "^2.2.0"
pillow = "^10.3.0"
drf-spectacular = "^0.27.2"
asgiref = "^3.8.1"
channels-redis = "^4.1.0"
django-redis = "^5.4.0"
django-celery-email = "^3.0.0"
advocate = "^1.0.0"
zipp = "^3.18.1"
unicodecsv = "^0.14.1"
django-celery-beat = "^2.6.0"
celery-redbeat = "^2.2.0"
flower = "^2.0.1"
service-identity = "^24.1.0"
regex = "^2024.4.28"
antlr4-python3-runtime = "^4.9.3"
tqdm = "^4.66.4"
boto3 = "^1.34.98"
django-storages = {version = "^1.14.3", extras = ["azure", "google"]}
django-health-check = "^3.18.2"
psutil = "^5.9.8"
dj-database-url = "^2.1.0"
redis = "^5.0.4"
pysaml2 = "^7.5.0"
validators = "^0.28.1"
requests-oauthlib = "^2.0.0"
opentelemetry-api = "^1.24.0"
opentelemetry-exporter-otlp-proto-http = "^1.24.0"
opentelemetry-instrumentation = "^0.45b0"
opentelemetry-instrumentation-django = "^0.45b0"
opentelemetry-instrumentation-aiohttp-client = "^0.45b0"
opentelemetry-instrumentation-asgi = "^0.45b0"
opentelemetry-instrumentation-botocore = "^0.45b0"
opentelemetry-instrumentation-celery = "^0.45b0"
opentelemetry-instrumentation-dbapi = "^0.45b0"
opentelemetry-instrumentation-grpc = "^0.45b0"
opentelemetry-instrumentation-logging = "^0.45b0"
opentelemetry-instrumentation-psycopg2 = "^0.45b0"
opentelemetry-instrumentation-redis = "^0.45b0"
opentelemetry-instrumentation-requests = "^0.45b0"
opentelemetry-instrumentation-wsgi = "^0.45b0"
opentelemetry-proto = "^1.24.0"
opentelemetry-sdk = "^1.24.0"
opentelemetry-semantic-conventions = "^0.45b0"
opentelemetry-util-http = "^0.45b0"
brotli = "^1.1.0"
loguru = "^0.7.2"
django-cachalot = "^2.6.2"
celery-singleton = "^0.3.1"
posthog = "^3.5.0"
rich = "^13.7.1"
tzdata = "^2024.1"
sentry-sdk = "^2.0.1"
typing-extensions = "^4.11.0"
ollama = "^0.1.9"
langchain = "^0.1.17"
openai = "^1.30.1"
anthropic = "^0.37.1"
mistralai = "^1.1.0"
jsonschema = "^4.17.3"
icalendar = "^5.0.12"
jira2markdown = "^0.3.7"
prosemirror = {url = "https://github.com/fellowapp/prosemirror-py/archive/refs/tags/v0.3.5.zip"}
uvicorn = {version = "^0.29.0", extras = ["standard"]}
channels = {version = "^4.0.0", extras = ["daphne"]}
celery = {version = "^5.4.0", extras = ["redis"]}
[tool.poetry.scripts]
baserow = 'baserow.manage:main'
[tool.poetry.dependencies]
python = "^3.10"
django = "3.2.18"
django-cors-headers = "3.14.0"
djangorestframework = "3.14.0"
djangorestframework-simplejwt = "5.2.2"
psycopg2 = "2.9.5"
faker = "8.11.0"
twisted = "22.10.0"
gunicorn = "20.1.0"
uvicorn = {version = "0.20.0", extras = ["standard"]}
websockets = "10.4"
requests = "2.28.2"
itsdangerous = "2.1.2"
pillow = "9.0.0"
drf-spectacular = "0.25.1"
asgiref = "3.6.0"
channels = {version = "4.0.0", extras = ["daphne"]}
channels-redis = "4.0.0"
celery = {version = "5.2.7", extras = ["redis"]}
django-redis = "5.2.0"
django-celery-email = "3.0.0"
advocate = "1.0.0"
zipp = "3.5.0"
unicodecsv = "0.14.1"
django-celery-beat = "2.4.0"
celery-redbeat = "2.0.0"
service-identity = "21.1.0"
regex = "2021.8.3"
antlr4-python3-runtime = "4.9.3"
tqdm = "4.64.1"
boto3 = "1.26.83"
django-storages = "1.12.3"
django-health-check = "3.16.5"
psutil = "5.9.4"
dj-database-url = "0.5.0"
redis = "4.5.1"
pysaml2 = "7.4.1"
validators = "0.20.0"
requests-oauthlib = "1.3.1"
opentelemetry-api = "1.15.0"
opentelemetry-exporter-otlp-proto-http = "1.15.0"
opentelemetry-instrumentation = "0.36b0"
opentelemetry-instrumentation-django = "0.36b0"
opentelemetry-instrumentation-aiohttp-client = "0.36b0"
opentelemetry-instrumentation-asgi = "0.36b0"
opentelemetry-instrumentation-botocore = "0.36b0"
opentelemetry-instrumentation-celery = "0.36b0"
opentelemetry-instrumentation-dbapi = "0.36b0"
opentelemetry-instrumentation-grpc = "0.36b0"
opentelemetry-instrumentation-logging = "0.36b0"
opentelemetry-instrumentation-psycopg2 = "0.36b0"
opentelemetry-instrumentation-redis = "0.36b0"
opentelemetry-instrumentation-requests = "0.36b0"
opentelemetry-instrumentation-wsgi = "0.36b0"
opentelemetry-proto = "1.15.0"
opentelemetry-sdk = "1.15.0"
opentelemetry-semantic-conventions = "0.36b0"
opentelemetry-util-http = "0.36b0"
brotli = "1.0.9"
loguru = "0.6.0"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

@@ -1,26 +1,23 @@
{ depot
, fetchFromGitLab
, pkgs
, mkYarnPackage
, fetchYarnDeps
, lib
, stdenv
, makeWrapper
, python312
, nodejs_22
, python311
, nodejs-18_x
, ossOnly ? true
}:
let
python3 = python312;
nodejs = nodejs_22;
version = "1.29.1";
python3 = python311;
version = "1.16.0";
suffix = lib.optionalString ossOnly "-oss";
src' = fetchFromGitLab {
owner = "baserow";
repo = "baserow";
rev = version;
hash = "sha256-EdSrcs2+jiURpf3Nrv5FJzQkkJ2mcA+UY8ceSkeLOvU=";
sha256 = "02ij94cqk5f0lsy8qiz0zrm9087swji9lxqbzy64jf1drqsdjj7a";
};
src = if ossOnly then pkgs.runCommand "${src'.name}${suffix}" {} ''
cp -R ${src'} $out
@@ -29,23 +26,30 @@ let
rm -rf $out/enterprise
sed -i -e '/baserow_premium/d' -e '/baserow_enterprise/d' $out/backend/src/baserow/config/settings/base.py
sed -i -e '/premium/d' -e '/enterprise/d' $out/web-frontend/config/nuxt.config.base.js
'' else src';
web-frontend-deps = import ./web-frontend {
inherit pkgs nodejs;
inherit (pkgs) system;
src = "${src}/web-frontend";
};
nodejs = nodejs-18_x;
inherit (depot.third_party) poetry2nix;
poetry2nixOverrides = poetry2nix.defaultPoetryOverrides.overrideOverlay (self: super: let
addBuildInputs = f: buildInputs: f.overridePythonAttrs (old: {
buildInputs = (old.buildInputs or []) ++ buildInputs;
});
in {
#kombu = let
# kombuVersion = "5.2.4";
#in assert lib.assertMsg (super.kombu.version == kombuVersion) "kombu (${super.kombu.version}) is different version to what I expected (${kombuVersion}); maybe remove the override?"; super.kombu.overridePythonAttrs (old: {
# buildInputs = (old.buildInputs or []) ++ [ self.setuptools ];
# postPatch = ''
# ${old.postPatch or ""}
# substituteInPlace requirements/test.txt --replace "pytz>dev" "pytz"
# '';
#});
kombu = let
kombuVersion = "5.2.4";
in assert lib.assertMsg (super.kombu.version == kombuVersion) "kombu (${super.kombu.version}) is different version to what I expected (${kombuVersion}); maybe remove the override?"; super.kombu.overridePythonAttrs (old: {
buildInputs = (old.buildInputs or []) ++ [ self.setuptools ];
postPatch = ''
${old.postPatch or ""}
substituteInPlace requirements/test.txt --replace "pytz>dev" "pytz"
'';
});
opentelemetry-instrumentation-aiohttp = addBuildInputs super.opentelemetry-instrumentation-aiohttp [ self.hatchling ];
opentelemetry-instrumentation-aiohttp-client = addBuildInputs super.opentelemetry-instrumentation-aiohttp-client [ self.hatchling ];
opentelemetry-instrumentation-botocore = addBuildInputs super.opentelemetry-instrumentation-botocore [ self.hatchling ];
@@ -58,7 +62,6 @@ let
opentelemetry-instrumentation-redis = addBuildInputs super.opentelemetry-instrumentation-redis [ self.hatchling ];
opentelemetry-instrumentation-requests = addBuildInputs super.opentelemetry-instrumentation-requests [ self.hatchling ];
opentelemetry-instrumentation-wsgi = addBuildInputs super.opentelemetry-instrumentation-wsgi [ self.hatchling ];
opentelemetry-propagator-aws-xray = addBuildInputs super.opentelemetry-propagator-aws-xray [ self.setuptools ];
django-health-check = super.django-health-check.overridePythonAttrs (old: {
buildInputs = (old.buildInputs or []) ++ [
self.sphinx
@@ -69,11 +72,9 @@ let
'';
});
pysaml2 = addBuildInputs super.pysaml2 [ self.poetry-core ];
jira2markdown = addBuildInputs super.jira2markdown [ self.poetry-core ];
mistralai = addBuildInputs super.mistralai [ self.poetry-core ];
cython = python3.pkgs.cython_0;
pystemmer = addBuildInputs super.pystemmer [ self.cython ];
pytest-runner = null;
tokenizers = null;
uvloop = super.uvloop.overridePythonAttrs (old: {
nativeBuildInputs = (old.nativeBuildInputs or []) ++ [
self.pythonRelaxDepsHook
@@ -81,24 +82,6 @@ let
];
pythonRemoveDeps = [ "pytest-runner" ];
});
orjson = super.orjson.override { preferWheel = true; };
jiter = super.jiter.override { preferWheel = true; };
numpy = python3.pkgs.numpy;
django-cachalot = addBuildInputs super.django-cachalot [ self.setuptools ];
celery-singleton = super.celery-singleton.overridePythonAttrs (old: {
postPatch = ''
substituteInPlace pyproject.toml \
--replace-fail "poetry.masonry.api" "poetry.core.masonry.api"
'';
buildInputs = (old.buildInputs or []) ++ [ self.poetry-core ];
});
prosemirror = super.prosemirror.overridePythonAttrs (old: {
buildInputs = (old.buildInputs or []) ++ [ self.poetry-core ];
nativeBuildInputs = (old.nativeBuildInputs or []) ++ [
pkgs.unzip
];
});
anthropic = addBuildInputs super.anthropic [ self.hatch-fancy-pypi-readme ];
});
mkBackendSrc = { type, fromDir, pyproject, poetrylock, extra ? "" }: pkgs.runCommand "baserow-${type}-src" {
@@ -110,6 +93,7 @@ let
cp $pyproject $out/pyproject.toml
cp $poetrylock $out/poetry.lock
cp $src/README.md $out
rm $out/setup.py
${extra}
'';
mkBackendApp = {
@@ -160,94 +144,50 @@ let
});
in
{
inherit src;
inherit src web-frontend-deps;
web-frontend = mkYarnPackage {
web-frontend = stdenv.mkDerivation {
name = "baserow${suffix}-web-frontend";
inherit src nodejs;
sourceRoot = "source${lib.optionalString ossOnly "-oss"}/web-frontend";
# upgraded nan to work with our newer node
packageJSON = ./web-frontend/package.json;
yarnLock = ./web-frontend/yarn.lock;
offlineCache = fetchYarnDeps {
yarnLock = ./web-frontend/yarn.lock;
hash = "sha256-AILUgSCvqBtl5sEA5/k1ZCV0WhZMeJUlZL/frDbaWPo=";
};
inherit src version;
buildInputs = [ nodejs ];
nativeBuildInputs = [ makeWrapper ];
env.BASEROW_OSS_ONLY = ossOnly;
env.NUXT_TELEMETRY_DISABLED = true;
pkgConfig = {
node-sass = {
buildInputs = with pkgs; [ python3 python3.pkgs.distutils libsass pkg-config ];
postInstall = ''
mkdir -p $HOME/.node-gyp/${nodejs.version}
echo 9 > $HOME/.node-gyp/${nodejs.version}/installVersion
ln -sfv ${nodejs}/include $HOME/.node-gyp/${nodejs.version}
export npm_config_nodedir=${nodejs}
LIBSASS_EXT=auto yarn --offline run build
rm build/config.gypi
'';
};
};
postPatch = ''
substituteInPlace package.json \
--replace-fail '"node": ' '"_node_ignored_": '
'';
# Have to override configurePhase since we have a monorepo structure.
configurePhase = ''
runHook preConfigure
cp -r $node_modules node_modules
chmod -R +w node_modules
runHook postConfigure
'';
nodeDependencies = web-frontend-deps.shell.nodeDependencies;
buildPhase = ''
runHook preBuild
export HOME=$(mktemp -d)
yarn --offline build
runHook postBuild
'';
doDist = false;
installPhase = ''
runHook preInstall
outpath="$out/share/baserow"
mkdir -p "$outpath"
cp -R . "$outpath/web-frontend"
rm -rf "$outpath/web-frontend/node_modules"
ln -sf "$node_modules" "$outpath/web-frontend/node_modules"
mkdir -p $outpath
cp -R web-frontend $outpath/web-frontend
${lib.optionalString (!ossOnly) ''
mkdir -p $outpath/premium
cp -R ../premium/web-frontend $outpath/premium/web-frontend
cp -R premium/web-frontend $outpath/premium/web-frontend
mkdir -p $outpath/enterprise
cp -R ../enterprise/web-frontend $outpath/enterprise/web-frontend
cp -R enterprise/web-frontend $outpath/enterprise/web-frontend
''}
mkdir $out/bin
makeWrapper $node_modules/nuxt/bin/nuxt.js $out/bin/baserow-web-frontend \
--run "cd $outpath/web-frontend" \
--set BASEROW_OSS_ONLY "${if ossOnly then "true" else "false"}" \
--add-flags "start --config-file config/nuxt.config.prod.js"
# Disable prompts
export MINIMAL=true
export NODE_OPTIONS=--openssl-legacy-provider
runHook postInstall
pushd $outpath/web-frontend
mkdir node_modules
for f in $nodeDependencies/lib/node_modules/*; do
ln -s "$f" ./node_modules
done
export PATH="$nodeDependencies/bin:$PATH"
./node_modules/nuxt/bin/nuxt.js build --config-file config/nuxt.config.local.js
popd
mkdir $out/bin
makeWrapper $nodeDependencies/lib/node_modules/nuxt/bin/nuxt.js $out/bin/baserow-web-frontend \
--run "cd $outpath/web-frontend" \
--add-flags "start --config-file config/nuxt.config.local.js"
runHook postBuild
'';
dontInstall = true;
};
backend = backendEnv;

@@ -1,7 +1,7 @@
# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand.
package = []
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "81b2fa642d7f2d1219cf80112ace12d689d053d81be7f7addb98144d56fc0fb2"
python-versions = "*"
content-hash = "115cf985d932e9bf5f540555bbdd75decbb62cac81e399375fc19f6277f8c1d8"

@@ -1,57 +1,12 @@
[project]
name = "baserow-enterprise"
authors = [{ name = "Bram Wiepjes (Baserow)", email="bram@baserow.io" }]
description="""Baserow is an open source no-code database tool and Airtable \
alternative. Easily create a relational database without any \
technical expertise. Build a table and define custom fields \
like text, number, file and many more."""
# mixed license
license={file="LICENSE"}
requires-python=">=3.11"
version = "1.29.1"
classifiers = []
[project.urls]
Homepage = "https://baserow.io"
"Bug Tracker" = "https://gitlab.com/baserow/baserow/-/issues/"
Documentation = "https://baserow.io/user-docs"
Support = "https://community.baserow.io/"
Changelog = "https://gitlab.com/baserow/baserow/-/issues/"
[tool.black]
exclude = '''
/(
| migrations
| generated
)/
'''
[tool.setuptools]
include-package-data = true
# https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html
platforms = ["Linux"]
[tool.setuptools.packages.find]
where = ["src/"]
include = ["baserow_enterprise", "baserow_enterprise.*"]
[tool.poetry]
name = "baserow-enterprise"
version = "1.29.1"
description = ""
authors = ["Your Name <you@example.com>"]
readme = "README.md"
name = "baserow_enterprise"
version = "1.15.1"
description = "Baserow: open source no-code database backend."
authors = ["Bram Wiepjes (Baserow) <bram@baserow.io>"]
packages = [
{ include = "baserow_enterprise", from = "src" },
]
[tool.poetry.dependencies]
python = "^3.11"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

@@ -1,7 +1,7 @@
# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand.
package = []
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "81b2fa642d7f2d1219cf80112ace12d689d053d81be7f7addb98144d56fc0fb2"
python-versions = "*"
content-hash = "115cf985d932e9bf5f540555bbdd75decbb62cac81e399375fc19f6277f8c1d8"

@@ -1,55 +1,12 @@
[project]
name = "baserow-premium"
authors = [{ name = "Bram Wiepjes (Baserow)", email = "bram@baserow.io" }]
description = """Baserow is an open source no-code database tool and Airtable \
alternative. Easily create a relational database without any \
technical expertise. Build a table and define custom fields \
like text, number, file and many more."""
# mixed license
license={file="LICENSE"}
requires-python=">=3.11"
version = "1.29.1"
classifiers = []
[project.urls]
Homepage = "https://baserow.io"
"Bug Tracker" = "https://gitlab.com/baserow/baserow/-/issues/"
Documentation = "https://baserow.io/user-docs"
Support = "https://community.baserow.io/"
Changelog = "https://gitlab.com/baserow/baserow/-/issues/"
[tool.black]
exclude = '''
/(
| migrations
| generated
)/
'''
[tool.setuptools]
include-package-data = true
# https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html
platforms = ["Linux"]
[tool.setuptools.packages.find]
where = ["src/"]
include = ["baserow_premium", "baserow_premium.*"]
[tool.poetry]
name = "baserow-premium"
version = "1.29.1"
description = ""
authors = ["Your Name <you@example.com>"]
readme = "README.md"
name = "baserow_premium"
version = "1.15.1"
description = "Baserow: open source no-code database backend."
authors = ["Bram Wiepjes (Baserow) <bram@baserow.io>"]
packages = [
{ include = "baserow_premium", from = "src" },
]
[tool.poetry.dependencies]
python = "^3.11"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

@@ -0,0 +1,20 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-12_x", src}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs;
nodejs = nodejs // {
python = pkgs.python310;
};
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit src nodeEnv;
}

@@ -0,0 +1,689 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
else {
packageObj.devDependencies = {};
}
replaceDependencies(packageObj.optionalDependencies);
replaceDependencies(packageObj.peerDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "reconstructpackagelock.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 2,
requires: true,
packages: {
"": {
name: packageObj.name,
version: packageObj.version,
license: packageObj.license,
bin: packageObj.bin,
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
}
},
dependencies: {}
};
function augmentPackageJSON(filePath, packages, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
packages[filePath] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
};
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, packages, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, packages, dependencies);
}
});
} else {
augmentPackageJSON(filePath, packages, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Script that links bins defined in package.json to the node_modules bin directory
# NPM does not do this for top-level packages itself anymore as of v7
linkBinsScript = writeTextFile {
name = "linkbins.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
if(packageObj.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
if(typeof packageObj.bin == "object") {
Object.keys(packageObj.bin).forEach(function(exe) {
if(fs.existsSync(packageObj.bin[exe])) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin[exe]),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
else {
if(fs.existsSync(packageObj.bin)) {
console.log("linking bin '" + packageObj.bin + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin),
path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
);
}
else {
console.log("skipping non-existent bin '" + packageObj.bin + "'");
}
}
}
else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.directories.bin, exe),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
runHook postRebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
fi
# Link executables defined in package.json
node ${linkBinsScript}
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Fixup all executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
if isScript "$file"
then
sed -i 's/\r$//' "$file" # convert crlf to lf
fi
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

File diff suppressed because it is too large.

@@ -1,147 +0,0 @@
{
"name": "baserow",
"version": "1.29.1",
"private": true,
"description": "Baserow: open source no-code database web frontend.",
"author": "Bram Wiepjes (Baserow)",
"license": "MIT",
"scripts": {
"build": "NODE_ENV=production nuxt build",
"build-local": "NODE_ENV=production nuxt build --config-file ./config/nuxt.config.local.js",
"dev": "nuxt --hostname 0.0.0.0",
"start": "nuxt start --hostname 0.0.0.0",
"eslint": "eslint -c .eslintrc.js --ext .js,.vue . ../premium/web-frontend ../enterprise/web-frontend",
"lint": "yarn eslint && yarn stylelint",
"fix": "yarn prettier **/*.js **/*.vue **/*.scss -w && yarn eslint --fix && yarn stylelint --fix",
"stylelint": "stylelint **/*.scss ../premium/web-frontend/modules/**/*.scss ../enterprise/web-frontend/modules/**/*.scss",
"test-core": "jest --verbose false --config jest.config.js",
"test-premium": "jest --verbose false --config ../premium/web-frontend/jest.config.js",
"test-enterprise": "jest --verbose false --config ../enterprise/web-frontend/jest.config.js",
"test": "yarn test-core && yarn test-premium && yarn test-enterprise",
"test-core-coverage": "JEST_JUNIT_OUTPUT_DIR=../reports/ jest --config jest.config.js -i --verbose --ci --forceExit --collectCoverage --coverageDirectory=\"./reports/coverage/\"",
"test-premium-coverage": "jest --config ../premium/web-frontend/jest.config.js -i --verbose --ci --forceExit --collectCoverage --coverageDirectory=\"./reports/coverage/\"",
"test-enterprise-coverage": "jest --config ../enterprise/web-frontend/jest.config.js -i --verbose --ci --forceExit --collectCoverage --coverageDirectory=\"./reports/coverage/\"",
"test-coverage": "yarn test-core-coverage && yarn test-premium-coverage && yarn test-enterprise-coverage",
"storybook": "nuxt storybook -s modules/core/static -p 6006",
"test-storybook": "test-storybook"
},
"dependencies": {
"@nuxtjs/i18n": "7.3.1",
"@nuxtjs/sentry": "7.1.11",
"@storybook/core-client": "6.5.9",
"@tiptap/core": "^2.0.3",
"@tiptap/extension-blockquote": "^2.2.2",
"@tiptap/extension-bold": "^2.2.2",
"@tiptap/extension-bubble-menu": "^2.2.3",
"@tiptap/extension-bullet-list": "^2.2.2",
"@tiptap/extension-code": "^2.2.2",
"@tiptap/extension-code-block": "^2.2.2",
"@tiptap/extension-document": "2.2.3",
"@tiptap/extension-dropcursor": "^2.2.4",
"@tiptap/extension-floating-menu": "^2.2.4",
"@tiptap/extension-gapcursor": "^2.2.4",
"@tiptap/extension-hard-break": "2.2.3",
"@tiptap/extension-heading": "^2.0.3",
"@tiptap/extension-highlight": "^2.2.2",
"@tiptap/extension-history": "^2.2.4",
"@tiptap/extension-horizontal-rule": "^2.2.2",
"@tiptap/extension-image": "^2.2.4",
"@tiptap/extension-italic": "^2.2.2",
"@tiptap/extension-link": "^2.2.4",
"@tiptap/extension-list-item": "^2.2.2",
"@tiptap/extension-mention": "2.2.3",
"@tiptap/extension-ordered-list": "^2.2.2",
"@tiptap/extension-paragraph": "2.2.3",
"@tiptap/extension-placeholder": "2.2.3",
"@tiptap/extension-strike": "^2.2.3",
"@tiptap/extension-subscript": "^2.2.3",
"@tiptap/extension-superscript": "^2.2.3",
"@tiptap/extension-task-item": "^2.2.4",
"@tiptap/extension-task-list": "^2.2.4",
"@tiptap/extension-text": "2.2.3",
"@tiptap/extension-underline": "^2.2.3",
"@tiptap/pm": "2.2.3",
"@tiptap/suggestion": "2.2.3",
"@tiptap/vue-2": "2.2.3",
"antlr4": "4.9.3",
"async-mutex": "0.4.0",
"axios": "^1.7.4",
"bignumber.js": "9.1.1",
"chart.js": "3.9.1",
"chartjs-adapter-moment": "1.0.1",
"cookie-universal-nuxt": "2.2.2",
"cross-env": "^7.0.2",
"flush-promises": "^1.0.2",
"iconoir": "^6.11.0",
"jwt-decode": "^3.1.2",
"lodash": "^4.17.21",
"markdown-it": "13.0.1",
"markdown-it-regexp": "^0.4.0",
"markdown-it-task-lists": "^2.1.1",
"moment": "^2.30.1",
"moment-guess": "^1.2.4",
"moment-timezone": "0.5.43",
"node-sass": "8.0.0",
"normalize-scss": "^7.0.1",
"nuxt": "2.17.2",
"papaparse": "5.4.1",
"path-to-regexp": "^1.8.0",
"posthog-js": "^1.136.2",
"resize-observer-polyfill": "^1.5.1",
"sass-loader": "10.4.1",
"thenby": "^1.3.4",
"tiptap-markdown": "^0.8.9",
"tldjs": "^2.3.1",
"uuid": "9.0.0",
"vue-chartjs": "4.1.2",
"vue2-smooth-scroll": "^1.6.0",
"vuejs-datepicker": "1.6.2",
"vuelidate": "0.7.7"
},
"devDependencies": {
"@babel/core": "7.23.6",
"@babel/eslint-parser": "7.23.3",
"@babel/preset-env": "7.23.6",
"@nuxtjs/eslint-config": "12.0.0",
"@nuxtjs/storybook": "^4.3.2",
"@nuxtjs/stylelint-module": "^4.2.2",
"@nuxtjs/svg": "^0.4.1",
"@storybook/addon-coverage": "^0.0.8",
"@storybook/test-runner": "^0.11.0",
"@vue/test-utils": "1.3.4",
"@vue/vue2-jest": "29.2.3",
"axios-mock-adapter": "1.21.4",
"babel-jest": "29.5.0",
"css-loader": "5.2.0",
"eslint": "8.37.0",
"eslint-config-prettier": "8.8.0",
"eslint-config-standard": "17.0.0",
"eslint-loader": "^4.0.2",
"eslint-plugin-import": "2.27.5",
"eslint-plugin-jest": "27.2.1",
"eslint-plugin-node": ">=8.0.1",
"eslint-plugin-nuxt": "4.0.0",
"eslint-plugin-prettier": "4.2.1",
"eslint-plugin-promise": "6.1.1",
"eslint-plugin-standard": ">=4.0.0",
"eslint-plugin-vue": "9.10.0",
"jest": "29.5.0",
"jest-environment-jsdom": "^29.5.0",
"jest-junit": "15.0.0",
"jest-serializer-vue": "3.1.0",
"jsdom": "21.1.1",
"jsdom-global": "^3.0.2",
"node-mocks-http": "1.12.2",
"nodemon": "2.0.22",
"postcss": "8",
"prettier": "2.8.7",
"storybook-addon-designs": "^6.3.1",
"storybook-addon-pseudo-states": "1.15.5",
"stylelint": "^16.3.1",
"stylelint-config-standard-scss": "^13.1.0",
"stylelint-selector-bem-pattern": "^4.0.0"
},
"resolutions": {
"nan": "2.22.0"
}
}

@@ -0,0 +1,19 @@
#!/usr/bin/env nix-shell
#!nix-shell -p nodePackages.node2nix -i bash
set -euo pipefail
BASEROW_SRC="$(nix-build $HOME/depot --no-out-link -A nix.pkgs.baserow.src)"
node2nix --development -i "$BASEROW_SRC/web-frontend/package.json" --nodejs-12
# Replace args.src with an explicit arg in node-packages.nix.
sed -E -i \
-e 's,^(\s*)src = [^{]+;,\1inherit src;,' \
-e 's|^(\{.*)\}:|\1, src}:|' \
./node-packages.nix
# Pass in src into node-packages.nix
sed -E -i \
-e 's,inherit nodeEnv;,inherit src nodeEnv;,' \
-e 's|}:$|, src}:|' \
./default.nix

File diff suppressed because it is too large.

@@ -78,12 +78,12 @@ let
poetry2nixSrcRaw = nixpkgs.fetchFromGitHub {
owner = "nix-community";
repo = "poetry2nix";
rev = "f554d27c1544d9c56e5f1f8e2b8aff399803674e"; # 2024.11.109713
hash = "sha256-F7N1mxH1VrkVNHR3JGNMRvp9+98KYO4b832KS8Gl2xI=";
rev = "3c92540611f42d3fb2d0d084a6c694cd6544b609";
hash = "sha256:1jfrangw0xb5b8sdkimc550p3m98zhpb1fayahnr7crg74as4qyq";
};
poetry2nixSrc = nixpkgs.runCommand "poetry2nix-patched" {
patches = [
./poetry2nix-rpds-py-0.21.0.patch
./poetry2nix-cryptography-42.0.4.patch
];
src = poetry2nixSrcRaw;
} ''

@@ -3,14 +3,14 @@
"alpha": {
"experimental": {
"candidateHashFilenames": [
"factorio_linux_2.0.19.tar.xz"
"factorio_linux_2.0.16.tar.xz"
],
"name": "factorio_alpha_x64-2.0.19.tar.xz",
"name": "factorio_alpha_x64-2.0.16.tar.xz",
"needsAuth": true,
"sha256": "059c34232ef3dcb50cec672198a0dfaf5ce32c892e227a79dd6d06b443dd394e",
"sha256": "9828ae257a3b2f95de2dae2f262e9d8d6b85f356911449166ceef1472d231e6d",
"tarDirectory": "x64",
"url": "https://factorio.com/get-download/2.0.19/alpha/linux64",
"version": "2.0.19"
"url": "https://factorio.com/get-download/2.0.16/alpha/linux64",
"version": "2.0.16"
},
"stable": {
"candidateHashFilenames": [
@@ -51,14 +51,14 @@
"expansion": {
"experimental": {
"candidateHashFilenames": [
"factorio-space-age_linux_2.0.19.tar.xz"
"factorio-space-age_linux_2.0.16.tar.xz"
],
"name": "factorio_expansion_x64-2.0.19.tar.xz",
"name": "factorio_expansion_x64-2.0.16.tar.xz",
"needsAuth": true,
"sha256": "e8e7b4ff19df0678806bea8fba68823f863458169050ad3f4ce0a4ae9ae37e7f",
"sha256": "32ae1b8f525148b3bb1f68e41b398543c2b0da29734f9f3b4f9509a86c64ecf4",
"tarDirectory": "x64",
"url": "https://factorio.com/get-download/2.0.19/expansion/linux64",
"version": "2.0.19"
"url": "https://factorio.com/get-download/2.0.16/expansion/linux64",
"version": "2.0.16"
},
"stable": {
"candidateHashFilenames": [
@@ -75,15 +75,15 @@
"headless": {
"experimental": {
"candidateHashFilenames": [
"factorio-headless_linux_2.0.19.tar.xz",
"factorio_headless_x64_2.0.19.tar.xz"
"factorio-headless_linux_2.0.16.tar.xz",
"factorio_headless_x64_2.0.16.tar.xz"
],
"name": "factorio_headless_x64-2.0.19.tar.xz",
"name": "factorio_headless_x64-2.0.16.tar.xz",
"needsAuth": false,
"sha256": "2e27aca3a7f65b50916d14a62203b6861cbe657e8d2dbd8f813e0a606efce9c7",
"sha256": "f2069b4b746500d945eeb67ef7eda5e7aebe7fd0294c2af4e117af22a3bbaea3",
"tarDirectory": "x64",
"url": "https://factorio.com/get-download/2.0.19/headless/linux64",
"version": "2.0.19"
"url": "https://factorio.com/get-download/2.0.16/headless/linux64",
"version": "2.0.16"
},
"stable": {
"candidateHashFilenames": [

@@ -0,0 +1,12 @@
diff --git a/overrides/default.nix b/overrides/default.nix
index c0f6dab200...ccbdedc947 100644
--- a/overrides/default.nix
+++ b/overrides/default.nix
@@ -559,6 +559,7 @@
"42.0.1" = "sha256-Kq/TSoI1cm9Pwg5CulNlAADmxdq0oWbgymHeMErUtcE=";
"42.0.2" = "sha256-jw/FC5rQO77h6omtBp0Nc2oitkVbNElbkBUduyprTIc=";
"42.0.3" = "sha256-QBZLGXdQz2WIBlAJM+yBk1QgmfF4b3G0Y1I5lZmAmtU=";
+ "42.0.4" = "sha256-qaXQiF1xZvv4sNIiR2cb5TfD7oNiYdvUwcm37nh2P2M=";
}.${version} or (
lib.warn "Unknown cryptography version: '${version}'. Please update getCargoHash." lib.fakeHash
);

@@ -1,12 +0,0 @@
diff --git a/overrides/default.nix b/overrides/default.nix
index 5f5130c..851c299 100644
--- a/overrides/default.nix
+++ b/overrides/default.nix
@@ -3296,6 +3296,7 @@ lib.composeManyExtensions [
"0.19.1" = "sha256-qIXdoCEVGCGUnTicZp4bUTJyGpFy9dwWY03lXUbxiHg=";
"0.20.0" = "sha256-5vbR2EbrAPJ8pb78tj/+r9nOWgQDT5aO/LUQI4kAGjU=";
"0.20.1" = "sha256-vqJCGlp5S2wECfgleCexCb9xegA8b6wo7YNBbcsbXqk=";
+ "0.21.0" = "sha256-VOmMNEdKHrPKJzs+D735Y52y47MubPwLlfkvB7Glh14=";
}.${version} or (
lib.warn "Unknown rpds-py version: '${version}'. Please update getCargoHash." lib.fakeHash
);
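
Both patches extend poetry2nix's getCargoHash tables with the hash for a newer upstream release. When the next version bump needs the same treatment, one workable approach is to leave the table alone so the override falls back to lib.fakeHash, attempt the build, and copy the hash Nix reports (a sketch; the nix.pkgs.baserow attribute path is assumed from the update script above):

nix-build $HOME/depot --no-out-link -A nix.pkgs.baserow 2>&1 | grep -A2 'hash mismatch'
# The fixed-output cargo vendor fetch fails with a 'specified:' (fake) hash and
# a 'got:' hash; the 'got:' value is the one to add to the getCargoHash table.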