diff --git a/flake.lock b/flake.lock index 203568a..d052ebf 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "nixpkgs": { "locked": { - "lastModified": 1726463316, - "narHash": "sha256-gI9kkaH0ZjakJOKrdjaI/VbaMEo9qBbSUl93DnU7f4c=", + "lastModified": 1729256560, + "narHash": "sha256-/uilDXvCIEs3C9l73JTACm4quuHUsIHcns1c+cHUJwA=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "99dc8785f6a0adac95f5e2ab05cc2e1bf666d172", + "rev": "4c2fcb090b1f3e5b47eaa7bd33913b574a11e0a0", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 929608e..16c72e7 100644 --- a/flake.nix +++ b/flake.nix @@ -23,7 +23,10 @@ in { api = pkgs.mkShell { - packages = with pkgs; [ postgresql_16 ]; + packages = with pkgs; [ + postgresql_16 + postgrest + ]; shellHook = '' alias dbmate="${pkgs.dbmate}/bin/dbmate --no-dump-schema --url postgres://postgres@localhost:15432/archtika?sslmode=disable" alias formatsql="${pkgs.pgformatter}/bin/pg_format -s 2 -f 2 -U 2 -i db/migrations/*.sql" @@ -49,6 +52,8 @@ dev-vm = self.nixosConfigurations.dev-vm.config.system.build.vm; default = pkgs.callPackage ./nix/package.nix { }; + + docker = pkgs.callPackage ./nix/docker.nix { }; } ); @@ -62,8 +67,12 @@ type = "app"; program = "${pkgs.writeShellScriptBin "api-setup" '' JWT_SECRET=$(tr -dc 'A-Za-z0-9' < /dev/urandom | head -c64) + WEBSITE_MAX_STORAGE_SIZE=100 + WEBSITE_MAX_NUMBER_USER=3 ${pkgs.postgresql_16}/bin/psql postgres://postgres@localhost:15432/archtika -c "ALTER DATABASE archtika SET \"app.jwt_secret\" TO '$JWT_SECRET'" + ${pkgs.postgresql_16}/bin/psql postgres://postgres@localhost:15432/archtika -c "ALTER DATABASE archtika SET \"app.website_max_storage_size\" TO $WEBSITE_MAX_STORAGE_SIZE" + ${pkgs.postgresql_16}/bin/psql postgres://postgres@localhost:15432/archtika -c "ALTER DATABASE archtika SET \"app.website_max_number_user\" TO $WEBSITE_MAX_NUMBER_USER" ${pkgs.dbmate}/bin/dbmate --url postgres://postgres@localhost:15432/archtika?sslmode=disable --migrations-dir ${self.outPath}/rest-api/db/migrations up diff --git a/nix/deploy/qs/default.nix b/nix/deploy/qs/default.nix index 22e14dc..fad8b18 100644 --- a/nix/deploy/qs/default.nix +++ b/nix/deploy/qs/default.nix @@ -15,6 +15,10 @@ acmeEmail = "thilo.hohlt@tutanota.com"; dnsProvider = "porkbun"; dnsEnvironmentFile = /var/lib/porkbun.env; - disableRegistration = true; + settings = { + disableRegistration = true; + maxWebsiteStorageSize = 250; + maxUserWebsites = 3; + }; }; } diff --git a/nix/dev-vm.nix b/nix/dev-vm.nix index 33518f4..42aef6c 100644 --- a/nix/dev-vm.nix +++ b/nix/dev-vm.nix @@ -24,6 +24,9 @@ virtualisation = { graphics = false; + memorySize = 2048; + cores = 2; + diskSize = 10240; sharedDirectories = { websites = { source = "/var/www/archtika-websites"; @@ -49,6 +52,15 @@ postgresql = { enable = true; package = pkgs.postgresql_16; + /* + PL/Perl: + overrideAttrs ( + finalAttrs: previousAttrs: { + buildInputs = previousAttrs.buildInputs ++ [ pkgs.perl ]; + configureFlags = previousAttrs.configureFlags ++ [ "--with-perl" ]; + } + ); + */ ensureDatabases = [ "archtika" ]; authentication = lib.mkForce '' local all all trust @@ -59,6 +71,11 @@ }; nginx = { enable = true; + recommendedProxySettings = true; + recommendedTlsSettings = true; + recommendedZstdSettings = true; + recommendedOptimisation = true; + virtualHosts."_" = { listen = [ { @@ -67,13 +84,15 @@ } ]; locations = { + "/previews/" = { + alias = "/var/www/archtika-websites/previews/"; + index = "index.html"; + tryFiles = "$uri $uri/ $uri.html =404"; + }; "/" = { root = 
"/var/www/archtika-websites"; index = "index.html"; tryFiles = "$uri $uri/ $uri.html =404"; - extraConfig = '' - autoindex on; - ''; }; }; }; diff --git a/nix/docker.nix b/nix/docker.nix new file mode 100644 index 0000000..4393df0 --- /dev/null +++ b/nix/docker.nix @@ -0,0 +1,45 @@ +{ + pkgs, + ... +}: + +# Behaviour of the Nix module needs to be replicated, which includes PostgreSQL, NGINX, ACME (DNS01), env variables, etc. +# Basic initialisation template can be found below +let + archtika = pkgs.callPackage ./package.nix { }; + + postgresConf = pkgs.writeText "postgres.conf" '' + + ''; + + nginxConf = pkgs.writeText "nginx.conf" '' + + ''; + + entrypoint = pkgs.writeShellScriptBin "entrypoint" '' + + ''; +in +pkgs.dockerTools.buildLayeredImage { + name = "archtika"; + tag = "latest"; + contents = [ + archtika + entrypoint + pkgs.postgresql_16 + pkgs.nginx + pkgs.acme-sh + pkgs.bash + pkgs.coreutils + ]; + config = { + Cmd = [ "${entrypoint}/bin/entrypoint" ]; + ExposedPorts = { + "80" = { }; + "443" = { }; + }; + Volumes = { + "/var/lib/postgresql/data" = { }; + }; + }; +} diff --git a/nix/module.nix b/nix/module.nix index dcf73b1..84eeb8e 100644 --- a/nix/module.nix +++ b/nix/module.nix @@ -76,10 +76,26 @@ in description = "API secrets for the DNS-01 challenge (required for wildcard domains)."; }; - disableRegistration = mkOption { - type = types.bool; - default = false; - description = "By default any user can create an account. That behavior can be disabled by using this option."; + settings = mkOption { + type = types.submodule { + options = { + disableRegistration = mkOption { + type = types.bool; + default = false; + description = "By default any user can create an account. That behavior can be disabled by using this option."; + }; + maxUserWebsites = mkOption { + type = types.int; + default = 2; + description = "Maximum number of websites allowed per user by default."; + }; + maxWebsiteStorageSize = mkOption { + type = types.int; + default = 500; + description = "Maximum amount of disk space in MB allowed per user website by default."; + }; + }; + }; }; }; @@ -91,7 +107,7 @@ in users.groups.${cfg.group} = { }; - systemd.tmpfiles.rules = [ "d /var/www/archtika-websites 0755 ${cfg.user} ${cfg.group} -" ]; + systemd.tmpfiles.rules = [ "d /var/www/archtika-websites 0777 ${cfg.user} ${cfg.group} -" ]; systemd.services.archtika-api = { description = "archtika API service"; @@ -105,12 +121,15 @@ in User = cfg.user; Group = cfg.group; Restart = "always"; + WorkingDirectory = "${cfg.package}/rest-api"; }; script = '' JWT_SECRET=$(tr -dc 'A-Za-z0-9' < /dev/urandom | head -c64) ${pkgs.postgresql_16}/bin/psql postgres://postgres@localhost:5432/${cfg.databaseName} -c "ALTER DATABASE ${cfg.databaseName} SET \"app.jwt_secret\" TO '$JWT_SECRET'" + ${pkgs.postgresql_16}/bin/psql postgres://postgres@localhost:5432/${cfg.databaseName} -c "ALTER DATABASE ${cfg.databaseName} SET \"app.website_max_storage_size\" TO ${toString cfg.settings.maxWebsiteStorageSize}" + ${pkgs.postgresql_16}/bin/psql postgres://postgres@localhost:5432/${cfg.databaseName} -c "ALTER DATABASE ${cfg.databaseName} SET \"app.website_max_number_user\" TO ${toString cfg.settings.maxUserWebsites}" ${pkgs.dbmate}/bin/dbmate --url postgres://postgres@localhost:5432/archtika?sslmode=disable --migrations-dir ${cfg.package}/rest-api/db/migrations up @@ -131,7 +150,7 @@ in }; script = '' - REGISTRATION_IS_DISABLED=${toString cfg.disableRegistration} BODY_SIZE_LIMIT=10M ORIGIN=https://${cfg.domain} PORT=${toString cfg.webAppPort} 
${pkgs.nodejs_22}/bin/node ${cfg.package}/web-app + REGISTRATION_IS_DISABLED=${toString cfg.settings.disableRegistration} BODY_SIZE_LIMIT=10M ORIGIN=https://${cfg.domain} PORT=${toString cfg.webAppPort} ${pkgs.nodejs_22}/bin/node ${cfg.package}/web-app ''; }; @@ -188,7 +207,7 @@ in default_type application/json; ''; }; - "/api/rpc/register" = mkIf cfg.disableRegistration { + "/api/rpc/register" = mkIf cfg.settings.disableRegistration { extraConfig = '' deny all; ''; diff --git a/rest-api/db/migrations/20240719071602_main_tables.sql b/rest-api/db/migrations/20240719071602_main_tables.sql index 2c023ab..fd0ef7c 100644 --- a/rest-api/db/migrations/20240719071602_main_tables.sql +++ b/rest-api/db/migrations/20240719071602_main_tables.sql @@ -9,10 +9,16 @@ CREATE ROLE anon NOLOGIN NOINHERIT; CREATE ROLE authenticated_user NOLOGIN NOINHERIT; +CREATE ROLE administrator NOLOGIN; + GRANT anon TO authenticator; GRANT authenticated_user TO authenticator; +GRANT administrator TO authenticator; + +GRANT authenticated_user TO administrator; + GRANT USAGE ON SCHEMA api TO anon; GRANT USAGE ON SCHEMA api TO authenticated_user; @@ -23,9 +29,10 @@ ALTER DEFAULT PRIVILEGES REVOKE EXECUTE ON FUNCTIONS FROM PUBLIC; CREATE TABLE internal.user ( id UUID PRIMARY KEY DEFAULT gen_random_uuid (), - username VARCHAR(16) UNIQUE NOT NULL CHECK (LENGTH(username) >= 3), + username VARCHAR(16) UNIQUE NOT NULL CHECK (LENGTH(username) >= 3 AND username ~ '^[a-zA-Z0-9_-]+$'), password_hash CHAR(60) NOT NULL, - role NAME NOT NULL DEFAULT 'authenticated_user', + user_role NAME NOT NULL DEFAULT 'authenticated_user', + max_number_websites INT NOT NULL DEFAULT CURRENT_SETTING('app.website_max_number_user') ::INT, created_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP() ); @@ -34,11 +41,11 @@ CREATE TABLE internal.website ( user_id UUID REFERENCES internal.user (id) ON DELETE CASCADE NOT NULL DEFAULT (CURRENT_SETTING('request.jwt.claims', TRUE)::JSON ->> 'user_id') ::UUID, content_type VARCHAR(10) CHECK (content_type IN ('Blog', 'Docs')) NOT NULL, title VARCHAR(50) NOT NULL CHECK (TRIM(title) != ''), + max_storage_size INT NOT NULL DEFAULT CURRENT_SETTING('app.website_max_storage_size') ::INT, is_published BOOLEAN NOT NULL DEFAULT FALSE, created_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP(), last_modified_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP(), - last_modified_by UUID REFERENCES internal.user (id) ON DELETE SET NULL, - title_search TSVECTOR GENERATED ALWAYS AS (TO_TSVECTOR('english', title)) STORED + last_modified_by UUID REFERENCES internal.user (id) ON DELETE SET NULL ); CREATE TABLE internal.media ( @@ -74,7 +81,8 @@ CREATE TABLE internal.header ( CREATE TABLE internal.home ( website_id UUID PRIMARY KEY REFERENCES internal.website (id) ON DELETE CASCADE, - main_content TEXT NOT NULL CHECK (TRIM(main_content) != ''), + main_content VARCHAR(200000) NOT NULL CHECK (TRIM(main_content) != ''), + meta_description VARCHAR(250) CHECK (TRIM(meta_description) != ''), last_modified_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP(), last_modified_by UUID REFERENCES internal.user (id) ON DELETE SET NULL ); @@ -84,7 +92,7 @@ CREATE TABLE internal.docs_category ( website_id UUID REFERENCES internal.website (id) ON DELETE CASCADE NOT NULL, user_id UUID REFERENCES internal.user (id) ON DELETE SET NULL DEFAULT (CURRENT_SETTING('request.jwt.claims', TRUE)::JSON ->> 'user_id') ::UUID, category_name VARCHAR(50) NOT NULL CHECK (TRIM(category_name) != ''), - category_weight INTEGER CHECK (category_weight >= 0) NOT NULL, + 
category_weight INT CHECK (category_weight >= 0) NOT NULL, created_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP(), last_modified_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP(), last_modified_by UUID REFERENCES internal.user (id) ON DELETE SET NULL, @@ -101,13 +109,12 @@ CREATE TABLE internal.article ( meta_author VARCHAR(100) CHECK (TRIM(meta_author) != ''), cover_image UUID REFERENCES internal.media (id) ON DELETE SET NULL, publication_date DATE, - main_content TEXT CHECK (TRIM(main_content) != ''), + main_content VARCHAR(200000) CHECK (TRIM(main_content) != ''), category UUID REFERENCES internal.docs_category (id) ON DELETE SET NULL, - article_weight INTEGER CHECK (article_weight IS NULL OR article_weight >= 0), + article_weight INT CHECK (article_weight IS NULL OR article_weight >= 0), created_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP(), last_modified_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP(), last_modified_by UUID REFERENCES internal.user (id) ON DELETE SET NULL, - title_description_search TSVECTOR GENERATED ALWAYS AS (TO_TSVECTOR('english', COALESCE(title, '') || ' ' || COALESCE(meta_description, ''))) STORED, UNIQUE (website_id, category, article_weight) ); @@ -120,7 +127,7 @@ CREATE TABLE internal.footer ( CREATE TABLE internal.legal_information ( website_id UUID PRIMARY KEY REFERENCES internal.website (id) ON DELETE CASCADE, - main_content TEXT NOT NULL CHECK (TRIM(main_content) != ''), + main_content VARCHAR(200000) NOT NULL CHECK (TRIM(main_content) != ''), created_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP(), last_modified_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP(), last_modified_by UUID REFERENCES internal.user (id) ON DELETE SET NULL @@ -129,7 +136,7 @@ CREATE TABLE internal.legal_information ( CREATE TABLE internal.collab ( website_id UUID REFERENCES internal.website (id) ON DELETE CASCADE, user_id UUID REFERENCES internal.user (id) ON DELETE CASCADE, - permission_level INTEGER CHECK (permission_level IN (10, 20, 30)) NOT NULL DEFAULT 10, + permission_level INT CHECK (permission_level IN (10, 20, 30)) NOT NULL DEFAULT 10, added_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP(), last_modified_at TIMESTAMPTZ NOT NULL DEFAULT CLOCK_TIMESTAMP(), last_modified_by UUID REFERENCES internal.user (id) ON DELETE SET NULL, @@ -167,6 +174,8 @@ DROP ROLE anon; DROP ROLE authenticated_user; +DROP ROLE administrator; + DROP ROLE authenticator; ALTER DEFAULT PRIVILEGES GRANT EXECUTE ON FUNCTIONS TO PUBLIC; diff --git a/rest-api/db/migrations/20240720073454_automatic_schema_cache_reloading.sql b/rest-api/db/migrations/20240720073454_automatic_schema_cache_reloading.sql index 83b1001..1c9c1c9 100644 --- a/rest-api/db/migrations/20240720073454_automatic_schema_cache_reloading.sql +++ b/rest-api/db/migrations/20240720073454_automatic_schema_cache_reloading.sql @@ -1,5 +1,5 @@ -- migrate:up -CREATE FUNCTION pgrst_watch () +CREATE FUNCTION internal.pgrst_watch () RETURNS EVENT_TRIGGER AS $$ BEGIN @@ -10,10 +10,10 @@ $$ LANGUAGE plpgsql; CREATE EVENT TRIGGER pgrst_watch ON ddl_command_end - EXECUTE FUNCTION pgrst_watch (); + EXECUTE FUNCTION internal.pgrst_watch (); -- migrate:down DROP EVENT TRIGGER pgrst_watch; -DROP FUNCTION pgrst_watch (); +DROP FUNCTION internal.pgrst_watch; diff --git a/rest-api/db/migrations/20240720074103_user_management_roles_jwt.sql b/rest-api/db/migrations/20240720074103_user_management_roles_jwt.sql index a3c4505..28af12b 100644 --- a/rest-api/db/migrations/20240720074103_user_management_roles_jwt.sql +++ 
b/rest-api/db/migrations/20240720074103_user_management_roles_jwt.sql @@ -13,9 +13,9 @@ BEGIN FROM pg_roles AS r WHERE - r.rolname = NEW.role)) THEN + r.rolname = NEW.user_role)) THEN RAISE foreign_key_violation - USING message = 'Unknown database role: ' || NEW.role; + USING message = 'Unknown database role: ' || NEW.user_role; END IF; RETURN NULL; END @@ -48,7 +48,7 @@ CREATE FUNCTION internal.user_role (username TEXT, pass TEXT, OUT role_name NAME AS $$ BEGIN SELECT - ROLE INTO role_name + u.user_role INTO role_name FROM internal.user AS u WHERE @@ -96,8 +96,17 @@ BEGIN RAISE invalid_parameter_value USING message = 'Password must contain at least one special character'; ELSE - INSERT INTO internal.user (username, password_hash) - VALUES (register.username, register.pass) + INSERT INTO internal.user (username, password_hash, user_role) + SELECT + register.username, + register.pass, + CASE WHEN COUNT(*) = 0 THEN + 'administrator' + ELSE + 'authenticated_user' + END + FROM + internal.user RETURNING id INTO user_id; END IF; @@ -111,7 +120,7 @@ AS $$ DECLARE _role NAME; _user_id UUID; - _exp INTEGER; + _exp INT := EXTRACT(EPOCH FROM CLOCK_TIMESTAMP())::INT + 86400; BEGIN SELECT internal.user_role (login.username, login.pass) INTO _role; @@ -120,12 +129,11 @@ BEGIN USING message = 'Invalid username or password'; ELSE SELECT - id INTO _user_id + u.id INTO _user_id FROM internal.user AS u WHERE u.username = login.username; - _exp := EXTRACT(EPOCH FROM CLOCK_TIMESTAMP())::INTEGER + 86400; SELECT SIGN(JSON_BUILD_OBJECT('role', _role, 'user_id', _user_id, 'username', login.username, 'exp', _exp), CURRENT_SETTING('app.jwt_secret')) INTO token; END IF; @@ -155,28 +163,28 @@ $$ LANGUAGE plpgsql SECURITY DEFINER; -GRANT EXECUTE ON FUNCTION api.register (TEXT, TEXT) TO anon; +GRANT EXECUTE ON FUNCTION api.register TO anon; -GRANT EXECUTE ON FUNCTION api.login (TEXT, TEXT) TO anon; +GRANT EXECUTE ON FUNCTION api.login TO anon; -GRANT EXECUTE ON FUNCTION api.delete_account (TEXT) TO authenticated_user; +GRANT EXECUTE ON FUNCTION api.delete_account TO authenticated_user; -- migrate:down DROP TRIGGER encrypt_pass ON internal.user; DROP TRIGGER ensure_user_role_exists ON internal.user; -DROP FUNCTION api.register (TEXT, TEXT); +DROP FUNCTION api.register; -DROP FUNCTION api.login (TEXT, TEXT); +DROP FUNCTION api.login; -DROP FUNCTION api.delete_account (TEXT); +DROP FUNCTION api.delete_account; -DROP FUNCTION internal.user_role (TEXT, TEXT); +DROP FUNCTION internal.user_role; -DROP FUNCTION internal.encrypt_pass (); +DROP FUNCTION internal.encrypt_pass; -DROP FUNCTION internal.check_role_exists (); +DROP FUNCTION internal.check_role_exists; DROP EXTENSION pgjwt; diff --git a/rest-api/db/migrations/20240720132802_exposed_views_functions.sql b/rest-api/db/migrations/20240720132802_exposed_views_functions.sql index 0e7e4aa..fa3a980 100644 --- a/rest-api/db/migrations/20240720132802_exposed_views_functions.sql +++ b/rest-api/db/migrations/20240720132802_exposed_views_functions.sql @@ -15,7 +15,9 @@ CREATE VIEW api.user WITH ( security_invoker = ON ) AS SELECT id, - username + username, + created_at, + max_number_websites FROM internal.user; @@ -87,38 +89,46 @@ AS $$ DECLARE _website_id UUID; _user_id UUID := (CURRENT_SETTING('request.jwt.claims', TRUE)::JSON ->> 'user_id')::UUID; + _user_website_count INT := ( + SELECT + COUNT(*) + FROM + internal.website AS w + WHERE + w.user_id = _user_id); + _user_max_websites_allowed_count INT := ( + SELECT + u.max_number_websites + FROM + internal.user AS u + WHERE + id = 
_user_id); BEGIN - INSERT INTO internal.website (content_type, title) - VALUES (create_website.content_type, create_website.title) - RETURNING - id INTO _website_id; - INSERT INTO internal.settings (website_id) - VALUES (_website_id); - INSERT INTO internal.header (website_id, logo_text) - VALUES (_website_id, 'archtika ' || create_website.content_type); - INSERT INTO internal.home (website_id, main_content) - VALUES (_website_id, '## About + IF (_user_website_count + 1 > _user_max_websites_allowed_count) THEN + RAISE invalid_parameter_value + USING message = FORMAT('Limit of %s websites exceeded', _user_max_websites_allowed_count); + END IF; + INSERT INTO internal.website (content_type, title) + VALUES (create_website.content_type, create_website.title) + RETURNING + id INTO _website_id; + INSERT INTO internal.settings (website_id) + VALUES (_website_id); + INSERT INTO internal.header (website_id, logo_text) + VALUES (_website_id, 'archtika ' || create_website.content_type); + INSERT INTO internal.home (website_id, main_content) + VALUES (_website_id, '## About -archtika is a FLOSS, modern, performant and lightweight CMS (Content Mangement System) in the form of a web application. It allows you to easily create, manage and publish minimal, responsive and SEO friendly blogging and documentation websites with official, professionally designed templates. - -It is also possible to add contributors to your sites, which is very useful for larger projects where, for example, several people are constantly working on the documentation. - -## How it works - -For the backend, PostgreSQL is used in combination with PostgREST to create a RESTful API. JSON web tokens along with row-level security control authentication and authorisation flows. - -The web application uses SvelteKit with SSR (Server Side Rendering) and Svelte version 5, currently in beta. - -NGINX is used to deploy the websites, serving the static site files from the `/var/www/archtika-websites` directory. The static files can be found in this directory via the path `/`, which is dynamically created by the web application.'); - INSERT INTO internal.footer (website_id, additional_text) - VALUES (_website_id, 'archtika is a free, open, modern, performant and lightweight CMS'); - website_id := _website_id; +archtika is a FLOSS, modern, performant and lightweight CMS (Content Management System) in the form of a web application. It allows you to easily create, manage and publish minimal, responsive and SEO friendly blogging and documentation websites with official, professionally designed templates.
It is also possible to add contributors to your sites, which is very useful for larger projects where, for example, several people are constantly working on the documentation.'); + INSERT INTO internal.footer (website_id, additional_text) + VALUES (_website_id, 'archtika is a free, open, modern, performant and lightweight CMS'); + website_id := _website_id; END; $$ LANGUAGE plpgsql SECURITY DEFINER; -GRANT EXECUTE ON FUNCTION api.create_website (VARCHAR(10), VARCHAR(50)) TO authenticated_user; +GRANT EXECUTE ON FUNCTION api.create_website TO authenticated_user; -- Security invoker only works on views if the user has access to the underlying table GRANT SELECT ON internal.user TO authenticated_user; @@ -139,7 +149,7 @@ GRANT SELECT, UPDATE (logo_type, logo_text, logo_image) ON internal.header TO au GRANT SELECT, UPDATE ON api.header TO authenticated_user; -GRANT SELECT, UPDATE (main_content) ON internal.home TO authenticated_user; +GRANT SELECT, UPDATE (main_content, meta_description) ON internal.home TO authenticated_user; GRANT SELECT, UPDATE ON api.home TO authenticated_user; @@ -164,7 +174,7 @@ GRANT SELECT, INSERT (website_id, user_id, permission_level), UPDATE (permission GRANT SELECT, INSERT, UPDATE, DELETE ON api.collab TO authenticated_user; -- migrate:down -DROP FUNCTION api.create_website (VARCHAR(10), VARCHAR(50)); +DROP FUNCTION api.create_website; DROP VIEW api.collab; diff --git a/rest-api/db/migrations/20240724191017_row_level_security.sql b/rest-api/db/migrations/20240724191017_row_level_security.sql index ae41e55..479973c 100644 --- a/rest-api/db/migrations/20240724191017_row_level_security.sql +++ b/rest-api/db/migrations/20240724191017_row_level_security.sql @@ -21,7 +21,7 @@ ALTER TABLE internal.legal_information ENABLE ROW LEVEL SECURITY; ALTER TABLE internal.collab ENABLE ROW LEVEL SECURITY; -CREATE FUNCTION internal.user_has_website_access (website_id UUID, required_permission INTEGER, collaborator_permission_level INTEGER DEFAULT NULL, collaborator_user_id UUID DEFAULT NULL, article_user_id UUID DEFAULT NULL, raise_error BOOLEAN DEFAULT TRUE, OUT has_access BOOLEAN) +CREATE FUNCTION internal.user_has_website_access (website_id UUID, required_permission INT, collaborator_permission_level INT DEFAULT NULL, collaborator_user_id UUID DEFAULT NULL, article_user_id UUID DEFAULT NULL, raise_error BOOLEAN DEFAULT TRUE, OUT has_access BOOLEAN) AS $$ DECLARE _user_id UUID := (CURRENT_SETTING('request.jwt.claims', TRUE)::JSON ->> 'user_id')::UUID; @@ -63,19 +63,29 @@ $$ LANGUAGE plpgsql SECURITY DEFINER; -GRANT EXECUTE ON FUNCTION internal.user_has_website_access (UUID, INTEGER, INTEGER, UUID, UUID, BOOLEAN) TO authenticated_user; +GRANT EXECUTE ON FUNCTION internal.user_has_website_access TO authenticated_user; CREATE POLICY view_user ON internal.user FOR SELECT USING (TRUE); +CREATE POLICY update_user ON internal.user + FOR UPDATE + USING ((CURRENT_SETTING('request.jwt.claims', TRUE)::JSON ->> 'role') = 'administrator'); + +CREATE POLICY delete_user ON internal.user + FOR DELETE + USING ((CURRENT_SETTING('request.jwt.claims', TRUE)::JSON ->> 'role') = 'administrator'); + CREATE POLICY view_websites ON internal.website FOR SELECT - USING (internal.user_has_website_access (id, 10, raise_error => FALSE)); + USING ((CURRENT_SETTING('request.jwt.claims', TRUE)::JSON ->> 'role') = 'administrator' + OR internal.user_has_website_access (id, 10, raise_error => FALSE)); CREATE POLICY update_website ON internal.website FOR UPDATE - USING (internal.user_has_website_access (id, 20)); + 
USING ((CURRENT_SETTING('request.jwt.claims', TRUE)::JSON ->> 'role') = 'administrator' + OR internal.user_has_website_access (id, 30)); CREATE POLICY delete_website ON internal.website FOR DELETE @@ -180,6 +190,10 @@ CREATE POLICY delete_collaborations ON internal.collab -- migrate:down DROP POLICY view_user ON internal.user; +DROP POLICY update_user ON internal.user; + +DROP POLICY delete_user ON internal.user; + DROP POLICY view_websites ON internal.website; DROP POLICY delete_website ON internal.website; @@ -234,7 +248,7 @@ DROP POLICY update_collaborations ON internal.collab; DROP POLICY delete_collaborations ON internal.collab; -DROP FUNCTION internal.user_has_website_access (UUID, INTEGER, INTEGER, UUID, UUID, BOOLEAN); +DROP FUNCTION internal.user_has_website_access; ALTER TABLE internal.user DISABLE ROW LEVEL SECURITY; diff --git a/rest-api/db/migrations/20240805132306_last_modified_triggers.sql b/rest-api/db/migrations/20240805132306_last_modified_triggers.sql index bcc55e1..71075cb 100644 --- a/rest-api/db/migrations/20240805132306_last_modified_triggers.sql +++ b/rest-api/db/migrations/20240805132306_last_modified_triggers.sql @@ -7,11 +7,11 @@ DECLARE BEGIN IF (NOT EXISTS ( SELECT - id + u.id FROM - internal.user + internal.user AS u WHERE - id = _user_id)) THEN + u.id = _user_id)) THEN RETURN COALESCE(NEW, OLD); END IF; IF TG_OP != 'DELETE' THEN @@ -97,5 +97,5 @@ DROP TRIGGER update_legal_information_last_modified ON internal.legal_informatio DROP TRIGGER update_collab_last_modified ON internal.collab; -DROP FUNCTION internal.update_last_modified (); +DROP FUNCTION internal.update_last_modified; diff --git a/rest-api/db/migrations/20240808141708_collaborator_not_owner.sql b/rest-api/db/migrations/20240808141708_collaborator_not_owner.sql index bf46316..6a0474b 100644 --- a/rest-api/db/migrations/20240808141708_collaborator_not_owner.sql +++ b/rest-api/db/migrations/20240808141708_collaborator_not_owner.sql @@ -26,5 +26,5 @@ CREATE CONSTRAINT TRIGGER check_user_not_website_owner -- migrate:down DROP TRIGGER check_user_not_website_owner ON internal.collab; -DROP FUNCTION internal.check_user_not_website_owner (); +DROP FUNCTION internal.check_user_not_website_owner; diff --git a/rest-api/db/migrations/20240810115846_image_upload_function.sql b/rest-api/db/migrations/20240810115846_image_upload_function.sql index dd9af74..a3e9495 100644 --- a/rest-api/db/migrations/20240810115846_image_upload_function.sql +++ b/rest-api/db/migrations/20240810115846_image_upload_function.sql @@ -6,13 +6,31 @@ AS $$ DECLARE _headers JSON := CURRENT_SETTING('request.headers', TRUE)::JSON; _website_id UUID := (_headers ->> 'x-website-id')::UUID; - _mimetype TEXT := _headers ->> 'x-mimetype'; _original_filename TEXT := _headers ->> 'x-original-filename'; _allowed_mimetypes TEXT[] := ARRAY['image/png', 'image/jpeg', 'image/webp', 'image/avif', 'image/gif', 'image/svg+xml']; - _max_file_size INT := 5 * 1024 * 1024; + _max_file_size BIGINT := 5 * 1024 * 1024; _has_access BOOLEAN; + _mimetype TEXT; BEGIN _has_access = internal.user_has_website_access (_website_id, 20); + _mimetype := CASE WHEN SUBSTRING($1 FROM 1 FOR 8) = '\x89504E470D0A1A0A'::BYTEA THEN + 'image/png' + WHEN SUBSTRING($1 FROM 1 FOR 3) = '\xFFD8FF'::BYTEA THEN + 'image/jpeg' + WHEN SUBSTRING($1 FROM 1 FOR 4) = '\x52494646'::BYTEA + AND SUBSTRING($1 FROM 9 FOR 4) = '\x57454250'::BYTEA THEN + 'image/webp' + WHEN SUBSTRING($1 FROM 5 FOR 7) = '\x66747970617669'::BYTEA THEN + 'image/avif' + WHEN SUBSTRING($1 FROM 1 FOR 6) = 
'\x474946383761'::BYTEA + OR SUBSTRING($1 FROM 1 FOR 6) = '\x474946383961'::BYTEA THEN + 'image/gif' + WHEN SUBSTRING($1 FROM 1 FOR 5) = '\x3C3F786D6C'::BYTEA + OR SUBSTRING($1 FROM 1 FOR 4) = '\x3C737667'::BYTEA THEN + 'image/svg+xml' + ELSE + NULL + END; IF OCTET_LENGTH($1) = 0 THEN RAISE invalid_parameter_value USING message = 'No file data was provided'; @@ -21,10 +39,10 @@ BEGIN SELECT UNNEST(_allowed_mimetypes))) THEN RAISE invalid_parameter_value - USING message = 'Invalid MIME type. Allowed types are: png, jpg, webp'; + USING message = 'Invalid MIME type. Allowed types are: png, jpg, webp, avif, gif, svg'; ELSIF OCTET_LENGTH($1) > _max_file_size THEN RAISE program_limit_exceeded - USING message = FORMAT('File size exceeds the maximum limit of %s MB', _max_file_size / (1024 * 1024)); + USING message = FORMAT('File size exceeds the maximum limit of %s', PG_SIZE_PRETTY(_max_file_size)); ELSE INSERT INTO internal.media (website_id, blob, mimetype, original_name) VALUES (_website_id, $1, _mimetype, _original_filename) @@ -56,7 +74,7 @@ BEGIN SELECT m.blob FROM - internal.media m + internal.media AS m WHERE m.id = retrieve_file.id INTO _blob; IF FOUND THEN @@ -70,16 +88,16 @@ $$ LANGUAGE plpgsql SECURITY DEFINER; -GRANT EXECUTE ON FUNCTION api.upload_file (BYTEA) TO authenticated_user; +GRANT EXECUTE ON FUNCTION api.upload_file TO authenticated_user; -GRANT EXECUTE ON FUNCTION api.retrieve_file (UUID) TO anon; +GRANT EXECUTE ON FUNCTION api.retrieve_file TO anon; -GRANT EXECUTE ON FUNCTION api.retrieve_file (UUID) TO authenticated_user; +GRANT EXECUTE ON FUNCTION api.retrieve_file TO authenticated_user; -- migrate:down -DROP FUNCTION api.upload_file (BYTEA); +DROP FUNCTION api.upload_file; -DROP FUNCTION api.retrieve_file (UUID); +DROP FUNCTION api.retrieve_file; DROP DOMAIN "*/*"; diff --git a/rest-api/db/migrations/20240911070907_change_log.sql b/rest-api/db/migrations/20240911070907_change_log.sql index be5574e..580f03e 100644 --- a/rest-api/db/migrations/20240911070907_change_log.sql +++ b/rest-api/db/migrations/20240911070907_change_log.sql @@ -13,102 +13,6 @@ CREATE TABLE internal.change_log ( new_value HSTORE ); -CREATE FUNCTION internal.track_changes () - RETURNS TRIGGER - AS $$ -DECLARE - _website_id UUID; - _user_id UUID := (CURRENT_SETTING('request.jwt.claims', TRUE)::JSON ->> 'user_id')::UUID; -BEGIN - IF (NOT EXISTS ( - SELECT - id - FROM - internal.user - WHERE - id = _user_id) OR (to_jsonb (OLD.*) - 'last_modified_at' - 'last_modified_by') = (to_jsonb (NEW.*) - 'last_modified_at' - 'last_modified_by')) THEN - RETURN NULL; - END IF; - IF TG_TABLE_NAME = 'website' THEN - _website_id := NEW.id; - ELSE - _website_id := COALESCE(NEW.website_id, OLD.website_id); - END IF; - IF TG_OP = 'INSERT' THEN - INSERT INTO internal.change_log (website_id, table_name, operation, new_value) - VALUES (_website_id, TG_TABLE_NAME, TG_OP, HSTORE (NEW)); - ELSIF (TG_OP = 'UPDATE' - AND EXISTS ( - SELECT - id - FROM - internal.website - WHERE - id = _website_id)) THEN - INSERT INTO internal.change_log (website_id, table_name, operation, old_value, new_value) - VALUES (_website_id, TG_TABLE_NAME, TG_OP, HSTORE (OLD) - HSTORE (NEW), HSTORE (NEW) - HSTORE (OLD)); - ELSIF (TG_OP = 'DELETE' - AND EXISTS ( - SELECT - id - FROM - internal.website - WHERE - id = _website_id)) THEN - INSERT INTO internal.change_log (website_id, table_name, operation, old_value) - VALUES (_website_id, TG_TABLE_NAME, TG_OP, HSTORE (OLD)); - END IF; - RETURN NULL; -END; -$$ -LANGUAGE plpgsql -SECURITY DEFINER; - -CREATE 
TRIGGER website_track_changes - AFTER UPDATE ON internal.website - FOR EACH ROW - EXECUTE FUNCTION internal.track_changes (); - -CREATE TRIGGER settings_track_changes - AFTER UPDATE ON internal.settings - FOR EACH ROW - EXECUTE FUNCTION internal.track_changes (); - -CREATE TRIGGER header_track_changes - AFTER UPDATE ON internal.header - FOR EACH ROW - EXECUTE FUNCTION internal.track_changes (); - -CREATE TRIGGER home_track_changes - AFTER UPDATE ON internal.home - FOR EACH ROW - EXECUTE FUNCTION internal.track_changes (); - -CREATE TRIGGER article_track_changes - AFTER INSERT OR UPDATE OR DELETE ON internal.article - FOR EACH ROW - EXECUTE FUNCTION internal.track_changes (); - -CREATE TRIGGER docs_category_track_changes - AFTER INSERT OR UPDATE OR DELETE ON internal.docs_category - FOR EACH ROW - EXECUTE FUNCTION internal.track_changes (); - -CREATE TRIGGER footer_track_changes - AFTER UPDATE ON internal.footer - FOR EACH ROW - EXECUTE FUNCTION internal.track_changes (); - -CREATE TRIGGER legal_information_track_changes - AFTER INSERT OR UPDATE OR DELETE ON internal.legal_information - FOR EACH ROW - EXECUTE FUNCTION internal.track_changes (); - -CREATE TRIGGER collab_track_changes - AFTER INSERT OR UPDATE OR DELETE ON internal.collab - FOR EACH ROW - EXECUTE FUNCTION internal.track_changes (); - CREATE VIEW api.change_log WITH ( security_invoker = ON ) AS SELECT @@ -120,26 +24,141 @@ GRANT SELECT ON internal.change_log TO authenticated_user; GRANT SELECT ON api.change_log TO authenticated_user; +ALTER TABLE internal.change_log ENABLE ROW LEVEL SECURITY; + +CREATE POLICY view_change_log ON internal.change_log + FOR SELECT + USING (internal.user_has_website_access (website_id, 10)); + +CREATE FUNCTION internal.track_changes () + RETURNS TRIGGER + AS $$ +DECLARE + _website_id UUID; + _user_id UUID := (CURRENT_SETTING('request.jwt.claims', TRUE)::JSON ->> 'user_id')::UUID; + _new_value HSTORE; +BEGIN + IF (NOT EXISTS ( + SELECT + u.id + FROM + internal.user AS u + WHERE + u.id = _user_id) OR REGEXP_REPLACE((to_jsonb (OLD.*) - 'last_modified_at' - 'last_modified_by')::TEXT, '\r\n|\r', '\n', 'g') = REGEXP_REPLACE((to_jsonb (NEW.*) - 'last_modified_at' - 'last_modified_by')::TEXT, '\r\n|\r', '\n', 'g')) THEN + RETURN NULL; + END IF; + IF TG_TABLE_NAME = 'website' THEN + _website_id := NEW.id; + ELSE + _website_id := COALESCE(NEW.website_id, OLD.website_id); + END IF; + IF TG_OP = 'INSERT' THEN + _new_value := CASE WHEN TG_TABLE_NAME = 'media' THEN + HSTORE (NEW) - 'blob'::TEXT + ELSE + HSTORE (NEW) + END; + INSERT INTO internal.change_log (website_id, table_name, operation, new_value) + VALUES (_website_id, TG_TABLE_NAME, TG_OP, _new_value); + ELSIF (TG_OP = 'UPDATE' + AND EXISTS ( + SELECT + w.id + FROM + internal.website AS w + WHERE + w.id = _website_id)) THEN + INSERT INTO internal.change_log (website_id, table_name, operation, old_value, new_value) + VALUES (_website_id, TG_TABLE_NAME, TG_OP, HSTORE (OLD) - HSTORE (NEW), HSTORE (NEW) - HSTORE (OLD)); + ELSIF (TG_OP = 'DELETE' + AND EXISTS ( + SELECT + w.id + FROM + internal.website AS w + WHERE + w.id = _website_id)) THEN + INSERT INTO internal.change_log (website_id, table_name, operation, old_value) + VALUES (_website_id, TG_TABLE_NAME, TG_OP, HSTORE (OLD)); + END IF; + RETURN NULL; +END; +$$ +LANGUAGE plpgsql +SECURITY DEFINER; + +CREATE TRIGGER track_changes_website + AFTER UPDATE ON internal.website + FOR EACH ROW + EXECUTE FUNCTION internal.track_changes (); + +CREATE TRIGGER track_changes_media + AFTER INSERT ON internal.media + FOR 
EACH ROW + EXECUTE FUNCTION internal.track_changes (); + +CREATE TRIGGER track_changes_settings + AFTER UPDATE ON internal.settings + FOR EACH ROW + EXECUTE FUNCTION internal.track_changes (); + +CREATE TRIGGER track_changes_header + AFTER UPDATE ON internal.header + FOR EACH ROW + EXECUTE FUNCTION internal.track_changes (); + +CREATE TRIGGER track_changes_home + AFTER UPDATE ON internal.home + FOR EACH ROW + EXECUTE FUNCTION internal.track_changes (); + +CREATE TRIGGER track_changes_article + AFTER INSERT OR UPDATE OR DELETE ON internal.article + FOR EACH ROW + EXECUTE FUNCTION internal.track_changes (); + +CREATE TRIGGER track_changes_docs_category + AFTER INSERT OR UPDATE OR DELETE ON internal.docs_category + FOR EACH ROW + EXECUTE FUNCTION internal.track_changes (); + +CREATE TRIGGER track_changes_footer + AFTER UPDATE ON internal.footer + FOR EACH ROW + EXECUTE FUNCTION internal.track_changes (); + +CREATE TRIGGER track_changes_legal_information + AFTER INSERT OR UPDATE OR DELETE ON internal.legal_information + FOR EACH ROW + EXECUTE FUNCTION internal.track_changes (); + +CREATE TRIGGER track_changes_collab + AFTER INSERT OR UPDATE OR DELETE ON internal.collab + FOR EACH ROW + EXECUTE FUNCTION internal.track_changes (); + -- migrate:down -DROP TRIGGER website_track_changes ON internal.website; +DROP TRIGGER track_changes_website ON internal.website; -DROP TRIGGER settings_track_changes ON internal.settings; +DROP TRIGGER track_changes_media ON internal.media; -DROP TRIGGER header_track_changes ON internal.header; +DROP TRIGGER track_changes_settings ON internal.settings; -DROP TRIGGER home_track_changes ON internal.home; +DROP TRIGGER track_changes_header ON internal.header; -DROP TRIGGER article_track_changes ON internal.article; +DROP TRIGGER track_changes_home ON internal.home; -DROP TRIGGER docs_category_track_changes ON internal.docs_category; +DROP TRIGGER track_changes_article ON internal.article; -DROP TRIGGER footer_track_changes ON internal.footer; +DROP TRIGGER track_changes_docs_category ON internal.docs_category; -DROP TRIGGER legal_information_track_changes ON internal.legal_information; +DROP TRIGGER track_changes_footer ON internal.footer; -DROP TRIGGER collab_track_changes ON internal.collab; +DROP TRIGGER track_changes_legal_information ON internal.legal_information; -DROP FUNCTION internal.track_changes (); +DROP TRIGGER track_changes_collab ON internal.collab; + +DROP FUNCTION internal.track_changes; DROP VIEW api.change_log; diff --git a/rest-api/db/migrations/20240920090915_custom_domain_prefix.sql b/rest-api/db/migrations/20240920090915_custom_domain_prefix.sql index ba1dece..cfdd78f 100644 --- a/rest-api/db/migrations/20240920090915_custom_domain_prefix.sql +++ b/rest-api/db/migrations/20240920090915_custom_domain_prefix.sql @@ -41,13 +41,13 @@ CREATE TRIGGER update_domain_prefix_last_modified FOR EACH ROW EXECUTE FUNCTION internal.update_last_modified (); -CREATE TRIGGER domain_prefix_track_changes +CREATE TRIGGER track_changes_domain_prefix AFTER INSERT OR UPDATE OR DELETE ON internal.domain_prefix FOR EACH ROW EXECUTE FUNCTION internal.track_changes (); -- migrate:down -DROP TRIGGER domain_prefix_track_changes ON internal.domain_prefix; +DROP TRIGGER track_changes_domain_prefix ON internal.domain_prefix; DROP TRIGGER update_domain_prefix_last_modified ON internal.domain_prefix; diff --git a/rest-api/db/migrations/20241006165029_administrator.sql b/rest-api/db/migrations/20241006165029_administrator.sql new file mode 100644 index 0000000..4e28b0b --- 
/dev/null +++ b/rest-api/db/migrations/20241006165029_administrator.sql @@ -0,0 +1,185 @@ +-- migrate:up +CREATE FUNCTION api.user_websites_storage_size () + RETURNS TABLE ( + website_id UUID, + website_title VARCHAR(50), + storage_size_bytes BIGINT, + storage_size_pretty TEXT, + max_storage_bytes BIGINT, + max_storage_pretty TEXT, + diff_storage_pretty TEXT + ) + AS $$ +DECLARE + _user_id UUID := (CURRENT_SETTING('request.jwt.claims', TRUE)::JSON ->> 'user_id')::UUID; + _tables TEXT[] := ARRAY['article', 'collab', 'docs_category', 'domain_prefix', 'footer', 'header', 'home', 'legal_information', 'media', 'settings', 'change_log']; + _query TEXT; + _union_queries TEXT := ''; +BEGIN + FOR i IN 1..ARRAY_LENGTH(_tables, 1) + LOOP + _union_queries := _union_queries || FORMAT(' + SELECT SUM(PG_COLUMN_SIZE(t)) FROM internal.%s AS t WHERE t.website_id = w.id', _tables[i]); + IF i < ARRAY_LENGTH(_tables, 1) THEN + _union_queries := _union_queries || ' UNION ALL '; + END IF; + END LOOP; + _query := FORMAT(' + SELECT + w.id AS website_id, + w.title AS website_title, + COALESCE(SUM(sizes.total_size), 0)::BIGINT AS storage_size_bytes, + PG_SIZE_PRETTY(COALESCE(SUM(sizes.total_size), 0)) AS storage_size_pretty, + (w.max_storage_size::BIGINT * 1024 * 1024) AS max_storage_bytes, + PG_SIZE_PRETTY(w.max_storage_size::BIGINT * 1024 * 1024) AS max_storage_pretty, + PG_SIZE_PRETTY((w.max_storage_size::BIGINT * 1024 * 1024) - COALESCE(SUM(sizes.total_size), 0)) AS diff_storage_pretty + FROM + internal.website AS w + LEFT JOIN LATERAL ( + %s + ) AS sizes(total_size) ON TRUE + WHERE + w.user_id = $1 + GROUP BY + w.id, + w.title + ORDER BY + storage_size_bytes DESC', _union_queries); + RETURN QUERY EXECUTE _query + USING _user_id; +END; +$$ +LANGUAGE plpgsql +SECURITY DEFINER; + +GRANT EXECUTE ON FUNCTION api.user_websites_storage_size TO authenticated_user; + +CREATE FUNCTION internal.prevent_website_storage_size_excess () + RETURNS TRIGGER + AS $$ +DECLARE + _website_id UUID := NEW.website_id; + _current_size BIGINT; + _size_difference BIGINT := PG_COLUMN_SIZE(NEW) - COALESCE(PG_COLUMN_SIZE(OLD), 0); + _max_storage_mb INT := ( + SELECT + w.max_storage_size + FROM + internal.website AS w + WHERE + w.id = _website_id); + _max_storage_bytes BIGINT := _max_storage_mb::BIGINT * 1024 * 1024; + _tables TEXT[] := ARRAY['article', 'collab', 'docs_category', 'domain_prefix', 'footer', 'header', 'home', 'legal_information', 'media', 'settings', 'change_log']; + _union_queries TEXT := ''; + _query TEXT; +BEGIN + FOR i IN 1..ARRAY_LENGTH(_tables, 1) + LOOP + _union_queries := _union_queries || FORMAT(' + SELECT SUM(PG_COLUMN_SIZE(t)) FROM internal.%s AS t WHERE t.website_id = $1', _tables[i]); + IF i < ARRAY_LENGTH(_tables, 1) THEN + _union_queries := _union_queries || ' UNION ALL '; + END IF; + END LOOP; + _query := FORMAT(' + SELECT COALESCE(SUM(sizes.total_size), 0)::BIGINT + FROM (%s) AS sizes(total_size)', _union_queries); + EXECUTE _query INTO _current_size + USING _website_id; + IF (_current_size + _size_difference) > _max_storage_bytes THEN + RAISE program_limit_exceeded + USING message = FORMAT('Storage limit exceeded. 
Current size: %s, Max size: %s', PG_SIZE_PRETTY(_current_size), PG_SIZE_PRETTY(_max_storage_bytes)); + END IF; + RETURN NEW; +END; +$$ +LANGUAGE plpgsql +SECURITY DEFINER; + +CREATE TRIGGER _prevent_storage_excess_article + BEFORE INSERT OR UPDATE ON internal.article + FOR EACH ROW + EXECUTE FUNCTION internal.prevent_website_storage_size_excess (); + +CREATE TRIGGER _prevent_storage_excess_collab + BEFORE INSERT OR UPDATE ON internal.collab + FOR EACH ROW + EXECUTE FUNCTION internal.prevent_website_storage_size_excess (); + +CREATE TRIGGER _prevent_storage_excess_docs_category + BEFORE INSERT OR UPDATE ON internal.docs_category + FOR EACH ROW + EXECUTE FUNCTION internal.prevent_website_storage_size_excess (); + +CREATE TRIGGER _prevent_storage_excess_domain_prefix + BEFORE INSERT OR UPDATE ON internal.domain_prefix + FOR EACH ROW + EXECUTE FUNCTION internal.prevent_website_storage_size_excess (); + +CREATE TRIGGER _prevent_storage_excess_footer + BEFORE UPDATE ON internal.footer + FOR EACH ROW + EXECUTE FUNCTION internal.prevent_website_storage_size_excess (); + +CREATE TRIGGER _prevent_storage_excess_header + BEFORE UPDATE ON internal.header + FOR EACH ROW + EXECUTE FUNCTION internal.prevent_website_storage_size_excess (); + +CREATE TRIGGER _prevent_storage_excess_home + BEFORE UPDATE ON internal.home + FOR EACH ROW + EXECUTE FUNCTION internal.prevent_website_storage_size_excess (); + +CREATE TRIGGER _prevent_storage_excess_legal_information + BEFORE INSERT OR UPDATE ON internal.legal_information + FOR EACH ROW + EXECUTE FUNCTION internal.prevent_website_storage_size_excess (); + +CREATE TRIGGER _prevent_storage_excess_media + BEFORE INSERT ON internal.media + FOR EACH ROW + EXECUTE FUNCTION internal.prevent_website_storage_size_excess (); + +CREATE TRIGGER _prevent_storage_excess_settings + BEFORE UPDATE ON internal.settings + FOR EACH ROW + EXECUTE FUNCTION internal.prevent_website_storage_size_excess (); + +GRANT UPDATE (max_storage_size) ON internal.website TO administrator; + +GRANT UPDATE, DELETE ON internal.user TO administrator; + +GRANT UPDATE, DELETE ON api.user TO administrator; + +-- migrate:down +DROP FUNCTION api.user_websites_storage_size; + +DROP TRIGGER _prevent_storage_excess_article ON internal.article; + +DROP TRIGGER _prevent_storage_excess_collab ON internal.collab; + +DROP TRIGGER _prevent_storage_excess_docs_category ON internal.docs_category; + +DROP TRIGGER _prevent_storage_excess_domain_prefix ON internal.domain_prefix; + +DROP TRIGGER _prevent_storage_excess_footer ON internal.footer; + +DROP TRIGGER _prevent_storage_excess_header ON internal.header; + +DROP TRIGGER _prevent_storage_excess_home ON internal.home; + +DROP TRIGGER _prevent_storage_excess_legal_information ON internal.legal_information; + +DROP TRIGGER _prevent_storage_excess_media ON internal.media; + +DROP TRIGGER _prevent_storage_excess_settings ON internal.settings; + +DROP FUNCTION internal.prevent_website_storage_size_excess; + +REVOKE UPDATE (max_storage_size) ON internal.website FROM administrator; + +REVOKE UPDATE, DELETE ON internal.user FROM administrator; + +REVOKE UPDATE, DELETE ON api.user FROM administrator; + diff --git a/rest-api/db/migrations/20241011092744_filesystem_triggers.sql b/rest-api/db/migrations/20241011092744_filesystem_triggers.sql new file mode 100644 index 0000000..7c396f5 --- /dev/null +++ b/rest-api/db/migrations/20241011092744_filesystem_triggers.sql @@ -0,0 +1,54 @@ +-- migrate:up +CREATE FUNCTION internal.cleanup_filesystem () + RETURNS TRIGGER + AS $$ +DECLARE 
+ _website_id UUID; + _domain_prefix VARCHAR(16); + _base_path CONSTANT TEXT := '/var/www/archtika-websites/'; + _preview_path TEXT; + _prod_path TEXT; +BEGIN + IF TG_TABLE_NAME = 'website' THEN + _website_id := OLD.id; + ELSE + _website_id := OLD.website_id; + END IF; + SELECT + d.prefix INTO _domain_prefix + FROM + internal.domain_prefix d + WHERE + d.website_id = _website_id; + _preview_path := _base_path || 'previews/' || _website_id; + _prod_path := _base_path || COALESCE(_domain_prefix, _website_id::TEXT); + IF TG_TABLE_NAME = 'website' THEN + EXECUTE FORMAT('COPY (SELECT '''') TO PROGRAM ''rm -rf %s''', _preview_path); + EXECUTE FORMAT('COPY (SELECT '''') TO PROGRAM ''rm -rf %s''', _prod_path); + ELSE + EXECUTE FORMAT('COPY (SELECT '''') TO PROGRAM ''rm -f %s/legal-information.html''', _preview_path); + EXECUTE FORMAT('COPY (SELECT '''') TO PROGRAM ''rm -f %s/legal-information.html''', _prod_path); + END IF; + RETURN OLD; +END; +$$ +LANGUAGE plpgsql +SECURITY DEFINER; + +CREATE TRIGGER _cleanup_filesystem_website + BEFORE DELETE ON internal.website + FOR EACH ROW + EXECUTE FUNCTION internal.cleanup_filesystem (); + +CREATE TRIGGER _cleanup_filesystem_legal_information + BEFORE DELETE ON internal.legal_information + FOR EACH ROW + EXECUTE FUNCTION internal.cleanup_filesystem (); + +-- migrate:down +DROP TRIGGER _cleanup_filesystem_website ON internal.website; + +DROP TRIGGER _cleanup_filesystem_legal_information ON internal.legal_information; + +DROP FUNCTION internal.cleanup_filesystem; + diff --git a/web-app/package-lock.json b/web-app/package-lock.json index 12e611a..7ef462a 100644 --- a/web-app/package-lock.json +++ b/web-app/package-lock.json @@ -8,18 +8,19 @@ "name": "web-app", "version": "0.0.1", "dependencies": { - "fast-diff": "1.3.0", + "diff-match-patch": "1.0.5", "highlight.js": "11.10.0", "isomorphic-dompurify": "2.15.0", "marked": "14.1.2", "marked-highlight": "2.1.4" }, "devDependencies": { - "@playwright/test": "1.46.0", + "@playwright/test": "1.47.0", "@sveltejs/adapter-auto": "3.2.5", "@sveltejs/adapter-node": "5.2.3", "@sveltejs/kit": "2.5.28", "@sveltejs/vite-plugin-svelte": "4.0.0-next.6", + "@types/diff-match-patch": "1.0.36", "@types/eslint": "9.6.1", "@types/eslint__js": "8.42.3", "@types/eslint-config-prettier": "6.11.3", @@ -764,13 +765,13 @@ } }, "node_modules/@playwright/test": { - "version": "1.46.0", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.46.0.tgz", - "integrity": "sha512-/QYft5VArOrGRP5pgkrfKksqsKA6CEFyGQ/gjNe6q0y4tZ1aaPfq4gIjudr1s3D+pXyrPRdsy4opKDrjBabE5w==", + "version": "1.47.0", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.47.0.tgz", + "integrity": "sha512-SgAdlSwYVpToI4e/IH19IHHWvoijAYH5hu2MWSXptRypLSnzj51PcGD+rsOXFayde4P9ZLi+loXVwArg6IUkCA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "playwright": "1.46.0" + "playwright": "1.47.0" }, "bin": { "playwright": "cli.js" @@ -1215,6 +1216,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/diff-match-patch": { + "version": "1.0.36", + "resolved": "https://registry.npmjs.org/@types/diff-match-patch/-/diff-match-patch-1.0.36.tgz", + "integrity": "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/dompurify": { "version": "3.0.5", "resolved": "https://registry.npmjs.org/@types/dompurify/-/dompurify-3.0.5.tgz", @@ -2044,6 +2052,12 @@ "dev": true, "license": "MIT" }, + "node_modules/diff-match-patch": { + "version": "1.0.5", + 
"resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz", + "integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==", + "license": "Apache-2.0" + }, "node_modules/dompurify": { "version": "3.1.6", "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.6.tgz", @@ -2485,12 +2499,6 @@ "dev": true, "license": "MIT" }, - "node_modules/fast-diff": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", - "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", - "license": "Apache-2.0" - }, "node_modules/fast-glob": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", @@ -3647,13 +3655,13 @@ } }, "node_modules/playwright": { - "version": "1.46.0", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.46.0.tgz", - "integrity": "sha512-XYJ5WvfefWONh1uPAUAi0H2xXV5S3vrtcnXe6uAOgdGi3aSpqOSXX08IAjXW34xitfuOJsvXU5anXZxPSEQiJw==", + "version": "1.47.0", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.47.0.tgz", + "integrity": "sha512-jOWiRq2pdNAX/mwLiwFYnPHpEZ4rM+fRSQpRHwEwZlP2PUANvL3+aJOF/bvISMhFD30rqMxUB4RJx9aQbfh4Ww==", "dev": true, "license": "Apache-2.0", "dependencies": { - "playwright-core": "1.46.0" + "playwright-core": "1.47.0" }, "bin": { "playwright": "cli.js" @@ -3666,9 +3674,9 @@ } }, "node_modules/playwright-core": { - "version": "1.46.0", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.46.0.tgz", - "integrity": "sha512-9Y/d5UIwuJk8t3+lhmMSAJyNP1BUC/DqP3cQJDQQL/oWqAiuPTLgy7Q5dzglmTLwcBRdetzgNM/gni7ckfTr6A==", + "version": "1.47.0", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.47.0.tgz", + "integrity": "sha512-1DyHT8OqkcfCkYUD9zzUTfg7EfTd+6a8MkD/NWOvjo0u/SCNd5YmY/lJwFvUZOxJbWNds+ei7ic2+R/cRz/PDg==", "dev": true, "license": "Apache-2.0", "bin": { diff --git a/web-app/package.json b/web-app/package.json index 79e6551..8715dbd 100644 --- a/web-app/package.json +++ b/web-app/package.json @@ -14,11 +14,12 @@ "gents": "pg-to-ts generate -c postgres://postgres@localhost:15432/archtika -o src/lib/db-schema.ts -s internal" }, "devDependencies": { - "@playwright/test": "1.46.0", + "@playwright/test": "1.47.0", "@sveltejs/adapter-auto": "3.2.5", "@sveltejs/adapter-node": "5.2.3", "@sveltejs/kit": "2.5.28", "@sveltejs/vite-plugin-svelte": "4.0.0-next.6", + "@types/diff-match-patch": "1.0.36", "@types/eslint": "9.6.1", "@types/eslint__js": "8.42.3", "@types/eslint-config-prettier": "6.11.3", @@ -38,7 +39,7 @@ }, "type": "module", "dependencies": { - "fast-diff": "1.3.0", + "diff-match-patch": "1.0.5", "highlight.js": "11.10.0", "isomorphic-dompurify": "2.15.0", "marked": "14.1.2", diff --git a/web-app/playwright.config.ts b/web-app/playwright.config.ts index ff4cb3d..436b260 100644 --- a/web-app/playwright.config.ts +++ b/web-app/playwright.config.ts @@ -9,15 +9,36 @@ const config: PlaywrightTestConfig = { baseURL: "http://localhost:4173", video: "retain-on-failure" }, - testDir: "tests", + testDir: "./tests", testMatch: /(.+\.)?(test|spec)\.ts/, - retries: 3, - // Firefox and Webkit are not packaged yet, see https://github.com/NixOS/nixpkgs/issues/288826 + // https://github.com/NixOS/nixpkgs/issues/288826 projects: [ + { + name: "Register users", + testMatch: /global-setup\.ts/, + teardown: "Delete users" + }, + { + name: "Delete users", + testMatch: /global-teardown\.ts/ + 
}, { name: "Chromium", - use: { ...devices["Desktop Chrome"] } + use: { ...devices["Desktop Chrome"] }, + dependencies: ["Register users"] + }, + { + name: "Firefox", + use: { ...devices["Desktop Firefox"] }, + dependencies: ["Register users"] } + /* + Upstream bug "Error: browserContext.newPage: Target page, context or browser has been closed" + { + name: "Webkit", + use: { ...devices["Desktop Safari"] }, + dependencies: ["Register users"] + } */ ] }; diff --git a/web-app/src/hooks.server.ts b/web-app/src/hooks.server.ts index 73cf58b..b09d74a 100644 --- a/web-app/src/hooks.server.ts +++ b/web-app/src/hooks.server.ts @@ -1,5 +1,6 @@ import { redirect } from "@sveltejs/kit"; import { API_BASE_PREFIX, apiRequest } from "$lib/server/utils"; +import type { User } from "$lib/db-schema"; export const handle = async ({ event, resolve }) => { if (!event.url.pathname.startsWith("/api/")) { @@ -20,6 +21,13 @@ export const handle = async ({ event, resolve }) => { throw redirect(303, "/"); } + if ( + (userData.data as User).user_role !== "administrator" && + event.url.pathname.includes("/manage") + ) { + throw redirect(303, "/"); + } + event.locals.user = userData.data; } } diff --git a/web-app/src/lib/components/LoadingSpinner.svelte b/web-app/src/lib/components/LoadingSpinner.svelte index 12ee598..e349122 100644 --- a/web-app/src/lib/components/LoadingSpinner.svelte +++ b/web-app/src/lib/components/LoadingSpinner.svelte @@ -25,7 +25,7 @@ margin-inline-start: -2rem; border-radius: 50%; border: var(--border-primary); - border-width: 0.125rem; + border-width: 0.25rem; border-block-start-color: var(--color-accent); animation: spinner 500ms linear infinite; } diff --git a/web-app/src/lib/components/MarkdownEditor.svelte b/web-app/src/lib/components/MarkdownEditor.svelte index cbe47c6..024b2a3 100644 --- a/web-app/src/lib/components/MarkdownEditor.svelte +++ b/web-app/src/lib/components/MarkdownEditor.svelte @@ -1,6 +1,8 @@ +{#if pasting} + +{/if} + - + @@ -173,7 +182,7 @@ > - + diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/articles/+page.server.ts b/web-app/src/routes/(authenticated)/website/[websiteId]/articles/+page.server.ts index ac124fb..16baf93 100644 --- a/web-app/src/routes/(authenticated)/website/[websiteId]/articles/+page.server.ts +++ b/web-app/src/routes/(authenticated)/website/[websiteId]/articles/+page.server.ts @@ -4,12 +4,12 @@ import { apiRequest } from "$lib/server/utils"; import type { Article, DocsCategory } from "$lib/db-schema"; export const load: PageServerLoad = async ({ params, fetch, url, parent, locals }) => { - const searchQuery = url.searchParams.get("article_search_query"); - const filterBy = url.searchParams.get("article_filter"); + const searchQuery = url.searchParams.get("query"); + const filterBy = url.searchParams.get("filter"); - const { website, home } = await parent(); + const { website, home, permissionLevel } = await parent(); - let baseFetchUrl = `${API_BASE_PREFIX}/article?website_id=eq.${params.websiteId}&select=id,title`; + let baseFetchUrl = `${API_BASE_PREFIX}/article?website_id=eq.${params.websiteId}&select=id,user_id,title`; if (website.content_type === "Docs") { baseFetchUrl += ",article_weight,docs_category(category_name,category_weight)&order=docs_category(category_weight).desc.nullslast,article_weight.desc.nullslast"; @@ -21,7 +21,7 @@ export const load: PageServerLoad = async ({ params, fetch, url, parent, locals const parameters = new URLSearchParams(); if (searchQuery) { - parameters.append("title_description_search", 
`wfts(english).${searchQuery}`); + parameters.append("title", `wfts.${searchQuery}`); } switch (filterBy) { @@ -56,7 +56,9 @@ export const load: PageServerLoad = async ({ params, fetch, url, parent, locals totalArticleCount, articles, website, - home + home, + permissionLevel, + user: locals.user }; }; diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/articles/+page.svelte b/web-app/src/routes/(authenticated)/website/[websiteId]/articles/+page.svelte index a4c4103..7af5c50 100644 --- a/web-app/src/routes/(authenticated)/website/[websiteId]/articles/+page.svelte +++ b/web-app/src/routes/(authenticated)/website/[websiteId]/articles/+page.svelte @@ -40,7 +40,7 @@ - + @@ -56,36 +56,30 @@
- +
    - {#each data.articles as { id, title, article_weight, docs_category } (id)} + {#each data.articles as { id, user_id, title, article_weight, docs_category } (id)}
  • {title} {article_weight ? `(${article_weight})` : ""} @@ -129,7 +123,12 @@ > - + diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/articles/[articleId]/+page.server.ts b/web-app/src/routes/(authenticated)/website/[websiteId]/articles/[articleId]/+page.server.ts index e9af2a8..dc906da 100644 --- a/web-app/src/routes/(authenticated)/website/[websiteId]/articles/[articleId]/+page.server.ts +++ b/web-app/src/routes/(authenticated)/website/[websiteId]/articles/[articleId]/+page.server.ts @@ -23,9 +23,9 @@ export const load: PageServerLoad = async ({ parent, params, fetch }) => { ) ).data; - const { website } = await parent(); + const { website, permissionLevel } = await parent(); - return { website, article, categories, API_BASE_PREFIX }; + return { website, article, categories, API_BASE_PREFIX, permissionLevel }; }; export const actions: Actions = { @@ -40,7 +40,6 @@ export const actions: Actions = { }; if (coverFile) { - headers["X-Mimetype"] = coverFile.type; headers["X-Original-Filename"] = coverFile.name; } @@ -82,7 +81,6 @@ export const actions: Actions = { "Content-Type": "application/octet-stream", Accept: "application/vnd.pgrst.object+json", "X-Website-Id": params.websiteId, - "X-Mimetype": file.type, "X-Original-Filename": file.name }, body: await file.arrayBuffer(), diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/articles/[articleId]/+page.svelte b/web-app/src/routes/(authenticated)/website/[websiteId]/articles/[articleId]/+page.svelte index 9ae80b2..0e37219 100644 --- a/web-app/src/routes/(authenticated)/website/[websiteId]/articles/[articleId]/+page.svelte +++ b/web-app/src/routes/(authenticated)/website/[websiteId]/articles/[articleId]/+page.svelte @@ -44,14 +44,16 @@ - + {#if data.categories.length > 0} + + {/if} {/if} {#if data.article.cover_image} - + - + diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/categories/+page.server.ts b/web-app/src/routes/(authenticated)/website/[websiteId]/categories/+page.server.ts index 0254e87..b155021 100644 --- a/web-app/src/routes/(authenticated)/website/[websiteId]/categories/+page.server.ts +++ b/web-app/src/routes/(authenticated)/website/[websiteId]/categories/+page.server.ts @@ -2,7 +2,7 @@ import type { Actions, PageServerLoad } from "./$types"; import { API_BASE_PREFIX, apiRequest } from "$lib/server/utils"; import type { DocsCategory } from "$lib/db-schema"; -export const load: PageServerLoad = async ({ parent, params, fetch }) => { +export const load: PageServerLoad = async ({ parent, params, fetch, locals }) => { const categories: DocsCategory[] = ( await apiRequest( fetch, @@ -14,12 +14,14 @@ export const load: PageServerLoad = async ({ parent, params, fetch }) => { ) ).data; - const { website, home } = await parent(); + const { website, home, permissionLevel } = await parent(); return { categories, website, - home + home, + permissionLevel, + user: locals.user }; }; diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/categories/+page.svelte b/web-app/src/routes/(authenticated)/website/[websiteId]/categories/+page.svelte index d6f43e0..5b8b0ef 100644 --- a/web-app/src/routes/(authenticated)/website/[websiteId]/categories/+page.svelte +++ b/web-app/src/routes/(authenticated)/website/[websiteId]/categories/+page.svelte @@ -44,7 +44,7 @@ - + @@ -56,7 +56,7 @@

      - {#each data.categories as { id, website_id, category_name, category_weight } (`${website_id}-${id}`)} + {#each data.categories as { id, website_id, user_id, category_name, category_weight } (`${website_id}-${id}`)}
    • {category_name} ({category_weight}) @@ -89,7 +89,9 @@ - + @@ -104,7 +106,12 @@ > - + diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/collaborators/+page.server.ts b/web-app/src/routes/(authenticated)/website/[websiteId]/collaborators/+page.server.ts index 0854363..8641865 100644 --- a/web-app/src/routes/(authenticated)/website/[websiteId]/collaborators/+page.server.ts +++ b/web-app/src/routes/(authenticated)/website/[websiteId]/collaborators/+page.server.ts @@ -2,7 +2,7 @@ import type { Actions, PageServerLoad } from "./$types"; import { API_BASE_PREFIX, apiRequest } from "$lib/server/utils"; import type { Collab, User } from "$lib/db-schema"; -export const load: PageServerLoad = async ({ parent, params, fetch }) => { +export const load: PageServerLoad = async ({ parent, params, fetch, locals }) => { const collaborators: (Collab & { user: User })[] = ( await apiRequest( fetch, @@ -14,12 +14,14 @@ export const load: PageServerLoad = async ({ parent, params, fetch }) => { ) ).data; - const { website, home } = await parent(); + const { website, home, permissionLevel } = await parent(); return { website, home, - collaborators + collaborators, + permissionLevel, + user: locals.user }; }; diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/collaborators/+page.svelte b/web-app/src/routes/(authenticated)/website/[websiteId]/collaborators/+page.svelte index f2efd8e..12b95b1 100644 --- a/web-app/src/routes/(authenticated)/website/[websiteId]/collaborators/+page.svelte +++ b/web-app/src/routes/(authenticated)/website/[websiteId]/collaborators/+page.svelte @@ -51,7 +51,9 @@ - + @@ -89,7 +91,11 @@ - + @@ -104,7 +110,11 @@ > - + diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/legal-information/+page.server.ts b/web-app/src/routes/(authenticated)/website/[websiteId]/legal-information/+page.server.ts index 7188599..02aa560 100644 --- a/web-app/src/routes/(authenticated)/website/[websiteId]/legal-information/+page.server.ts +++ b/web-app/src/routes/(authenticated)/website/[websiteId]/legal-information/+page.server.ts @@ -1,7 +1,5 @@ import type { Actions, PageServerLoad } from "./$types"; import { API_BASE_PREFIX, apiRequest } from "$lib/server/utils"; -import { rm } from "node:fs/promises"; -import { join } from "node:path"; import type { LegalInformation } from "$lib/db-schema"; export const load: PageServerLoad = async ({ parent, fetch, params }) => { @@ -19,12 +17,13 @@ export const load: PageServerLoad = async ({ parent, fetch, params }) => { ) ).data; - const { website } = await parent(); + const { website, permissionLevel } = await parent(); return { legalInformation, website, - API_BASE_PREFIX + API_BASE_PREFIX, + permissionLevel }; }; @@ -58,11 +57,22 @@ export const actions: Actions = { return deleteLegalInformation; } - await rm( - join("/", "var", "www", "archtika-websites", params.websiteId, "legal-information.html"), - { force: true } - ); - return deleteLegalInformation; + }, + pasteImage: async ({ request, fetch, params }) => { + const data = await request.formData(); + const file = data.get("file") as File; + + return await apiRequest(fetch, `${API_BASE_PREFIX}/rpc/upload_file`, "POST", { + headers: { + "Content-Type": "application/octet-stream", + Accept: "application/vnd.pgrst.object+json", + "X-Website-Id": params.websiteId, + "X-Original-Filename": file.name + }, + body: await file.arrayBuffer(), + successMessage: "Successfully uploaded image", + returnData: true + }); } }; diff --git 
a/web-app/src/routes/(authenticated)/website/[websiteId]/legal-information/+page.svelte b/web-app/src/routes/(authenticated)/website/[websiteId]/legal-information/+page.svelte index b00f2d0..d87a7db 100644 --- a/web-app/src/routes/(authenticated)/website/[websiteId]/legal-information/+page.svelte +++ b/web-app/src/routes/(authenticated)/website/[websiteId]/legal-information/+page.svelte @@ -61,7 +61,9 @@ content={data.legalInformation?.main_content ?? ""} /> - + {#if data.legalInformation?.main_content} @@ -76,7 +78,9 @@ Caution! This action will remove the legal information page from the website and delete all data.
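The `pasteImage` action added to the legal-information server code above streams the raw file bytes straight to the PostgREST `upload_file` RPC, with the file's identity carried in custom headers rather than multipart form data. Roughly equivalent, as a standalone sketch under those assumptions (endpoint and header names as in the diff, everything else a placeholder):

// Hypothetical standalone version of the upload performed by pasteImage.
const uploadImage = async (file: File, websiteId: string) => {
  const response = await fetch(`${API_BASE_PREFIX}/rpc/upload_file`, {
    method: "POST",
    headers: {
      "Content-Type": "application/octet-stream",
      Accept: "application/vnd.pgrst.object+json",
      "X-Website-Id": websiteId,
      "X-Original-Filename": file.name
    },
    body: await file.arrayBuffer()
  });
  // The Accept header asks PostgREST for a single JSON object rather than an array.
  return response.json();
};

Note that the `X-Mimetype` header sent previously is removed in this revision; only the original filename is still forwarded.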

- + {/if}
diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/logs/+page.server.ts b/web-app/src/routes/(authenticated)/website/[websiteId]/logs/+page.server.ts
index 059a671..570e1a8 100644
--- a/web-app/src/routes/(authenticated)/website/[websiteId]/logs/+page.server.ts
+++ b/web-app/src/routes/(authenticated)/website/[websiteId]/logs/+page.server.ts
@@ -1,13 +1,15 @@
-import type { PageServerLoad } from "./$types";
+import type { PageServerLoad, Actions } from "./$types";
 import { API_BASE_PREFIX, apiRequest } from "$lib/server/utils";
 import type { ChangeLog, User, Collab } from "$lib/db-schema";
+import DiffMatchPatch from "diff-match-patch";
+import { PAGINATION_MAX_ITEMS } from "$lib/utils";
 
 export const load: PageServerLoad = async ({ parent, fetch, params, url }) => {
-  const userFilter = url.searchParams.get("logs_filter_user");
-  const resourceFilter = url.searchParams.get("logs_filter_resource");
-  const operationFilter = url.searchParams.get("logs_filter_operation");
-  const currentPage = Number.parseInt(url.searchParams.get("logs_results_page") ?? "1");
-  const resultOffset = (currentPage - 1) * 50;
+  const userFilter = url.searchParams.get("user");
+  const resourceFilter = url.searchParams.get("resource");
+  const operationFilter = url.searchParams.get("operation");
+  const currentPage = Number.parseInt(url.searchParams.get("page") ?? "1");
+  const resultOffset = (currentPage - 1) * PAGINATION_MAX_ITEMS;
 
   const searchParams = new URLSearchParams();
 
@@ -25,10 +27,13 @@
     searchParams.append("operation", `eq.${operationFilter.toUpperCase()}`);
   }
 
-  const constructedFetchUrl = `${baseFetchUrl}&${searchParams.toString()}&limit=50&offset=${resultOffset}`;
+  const constructedFetchUrl = `${baseFetchUrl}&${searchParams.toString()}&limit=${PAGINATION_MAX_ITEMS}&offset=${resultOffset}`;
 
   const changeLog: (ChangeLog & { user: { username: User["username"] } })[] = (
-    await apiRequest(fetch, constructedFetchUrl, "GET", { returnData: true })
+    await apiRequest(fetch, constructedFetchUrl, "GET", {
+      headers: { Accept: "application/vnd.pgrst.array+json;nulls=stripped" },
+      returnData: true
+    })
   ).data;
 
   const resultChangeLogData = await apiRequest(fetch, constructedFetchUrl, "HEAD", {
@@ -61,3 +66,49 @@
     collaborators
   };
 };
+
+export const actions: Actions = {
+  computeDiff: async ({ request, fetch }) => {
+    const data = await request.formData();
+
+    const dmp = new DiffMatchPatch();
+
+    const htmlDiff = (oldValue: string, newValue: string) => {
+      const diff = dmp.diff_main(oldValue, newValue);
+      dmp.diff_cleanupSemantic(diff);
+
+      return diff
+        .map(([op, text]) => {
+          switch (op) {
+            case 1:
+              return `<ins>${text}</ins>`;
+            case -1:
+              return `<del>${text}</del>`;
+            default:
+              return text;
+          }
+        })
+        .join("");
+    };
+
+    const log: ChangeLog = (
+      await apiRequest(
+        fetch,
+        `${API_BASE_PREFIX}/change_log?id=eq.${data.get("id")}&select=old_value,new_value`,
+        "GET",
+        {
+          headers: { Accept: "application/vnd.pgrst.object+json;nulls=stripped" },
+          returnData: true
+        }
+      )
+    ).data;
+
+    return {
+      logId: data.get("id"),
+      currentDiff: htmlDiff(
+        JSON.stringify(log.old_value, null, 2),
+        JSON.stringify(log.new_value, null, 2)
+      )
+    };
+  }
+};
diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/logs/+page.svelte b/web-app/src/routes/(authenticated)/website/[websiteId]/logs/+page.svelte
index 3bcf0ba..9e1a3c9 100644
---
a/web-app/src/routes/(authenticated)/website/[websiteId]/logs/+page.svelte +++ b/web-app/src/routes/(authenticated)/website/[websiteId]/logs/+page.svelte @@ -2,62 +2,46 @@ import WebsiteEditor from "$lib/components/WebsiteEditor.svelte"; import DateTime from "$lib/components/DateTime.svelte"; import Modal from "$lib/components/Modal.svelte"; - import type { PageServerData } from "./$types"; - import diff from "fast-diff"; + import type { PageServerData, ActionData } from "./$types"; import { page } from "$app/stores"; import { tables } from "$lib/db-schema"; import { previewContent } from "$lib/runes.svelte"; - import { sanitize } from "isomorphic-dompurify"; + import DOMPurify from "isomorphic-dompurify"; + import { enhanceForm } from "$lib/utils"; + import { enhance } from "$app/forms"; + import { sending } from "$lib/runes.svelte"; + import LoadingSpinner from "$lib/components/LoadingSpinner.svelte"; + import Pagination from "$lib/components/Pagination.svelte"; - const { data }: { data: PageServerData } = $props(); - - const htmlDiff = (oldValue: string, newValue: string) => { - return diff(oldValue, newValue) - .map(([type, value]) => { - let newString = ""; - - switch (type) { - case 1: - newString += `${value}`; - break; - case 0: - newString += `${value}`; - break; - case -1: - newString += `${value}`; - break; - } - - return newString; - }) - .join(""); - }; + const { data, form }: { data: PageServerData; form: ActionData } = $props(); let resources = $state({}); if (data.website.content_type === "Blog") { // eslint-disable-next-line @typescript-eslint/no-unused-vars - const { user, change_log, media, docs_category, ...restTables } = tables; + const { user, change_log, docs_category, ...restTables } = tables; resources = restTables; } if (data.website.content_type === "Docs") { // eslint-disable-next-line @typescript-eslint/no-unused-vars - const { user, change_log, media, ...restTables } = tables; + const { user, change_log, ...restTables } = tables; resources = restTables; } previewContent.value = data.home.main_content; - - let logsSection: HTMLElement; +{#if sending.value} + +{/if} + -
      +

      Logs @@ -74,8 +58,8 @@ Username: @@ -86,40 +70,33 @@ - - + +
      @@ -129,7 +106,7 @@ User Resource Operation - Date and time + Date & Time Changes @@ -153,12 +130,32 @@

      Log changes

      -

      {table_name} — {operation}

      +

      {table_name} — {operation} — User "{username}"

      -
      {@html sanitize(htmlDiff(oldValue, newValue), {
      -                      ALLOWED_TAGS: ["ins", "del"]
      -                    })}
      + {#if old_value && new_value} +

      Difference

      +
      + + +
      + {#if form?.logId === id && form?.currentDiff} +
      {@html DOMPurify.sanitize(
      +                          form.currentDiff,
      +                          { ALLOWED_TAGS: ["ins", "del"] }
      +                        )}
      + {/if} + {/if} + + {#if new_value && !old_value} +

      New value

      +
      {DOMPurify.sanitize(newValue)}
      + {/if} + + {#if old_value && !new_value} +

      Old value

      +
      {DOMPurify.sanitize(oldValue)}
      + {/if} @@ -166,102 +163,9 @@
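To summarize the flow rendered above: the `computeDiff` form action diffs the stored `old_value`/`new_value` JSON with diff-match-patch on the server and returns a string containing only plain text plus `<ins>`/`<del>` wrappers, and the client injects it with `{@html ...}` only after `DOMPurify.sanitize` restricted to those two tags, so nothing else from the logged values can become live markup. A small sketch of that pipeline under those assumptions (sample values are made up):

import DiffMatchPatch from "diff-match-patch";
import DOMPurify from "isomorphic-dompurify";

const dmp = new DiffMatchPatch();
const diff = dmp.diff_main('{ "title": "Old" }', '{ "title": "New" }');
dmp.diff_cleanupSemantic(diff);

// Mirror of the htmlDiff helper: insertions become <ins>, deletions become <del>,
// unchanged text passes through untouched.
const html = diff
  .map(([op, text]) =>
    op === 1 ? `<ins>${text}</ins>` : op === -1 ? `<del>${text}</del>` : text
  )
  .join("");

// Only <ins> and <del> survive sanitization, yielding roughly:
// { "title": "<del>Old</del><ins>New</ins>" }
const safe = DOMPurify.sanitize(html, { ALLOWED_TAGS: ["ins", "del"] });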
      - +

- -
diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/publish/+page.server.ts b/web-app/src/routes/(authenticated)/website/[websiteId]/publish/+page.server.ts
index ea8a1a3..69b944f 100644
--- a/web-app/src/routes/(authenticated)/website/[websiteId]/publish/+page.server.ts
+++ b/web-app/src/routes/(authenticated)/website/[websiteId]/publish/+page.server.ts
@@ -5,12 +5,12 @@ import BlogIndex from "$lib/templates/blog/BlogIndex.svelte";
 import DocsArticle from "$lib/templates/docs/DocsArticle.svelte";
 import DocsIndex from "$lib/templates/docs/DocsIndex.svelte";
 import { type WebsiteOverview, hexToHSL, slugify } from "$lib/utils";
-import { mkdir, readFile, rename, writeFile } from "node:fs/promises";
+import { mkdir, readFile, rename, writeFile, chmod, readdir } from "node:fs/promises";
 import { join } from "node:path";
 import { render } from "svelte/server";
 import type { Actions, PageServerLoad } from "./$types";
 
-export const load: PageServerLoad = async ({ params, fetch }) => {
+export const load: PageServerLoad = async ({ params, fetch, parent }) => {
   const websiteOverview: WebsiteOverview = (
     await apiRequest(
       fetch,
@@ -25,29 +25,15 @@
     )
   ).data;
 
-  generateStaticFiles(websiteOverview);
+  const { websitePreviewUrl, websiteProdUrl } = await generateStaticFiles(websiteOverview);
 
-  const websitePreviewUrl = `${
-    dev
-      ? "http://localhost:18000"
-      : process.env.ORIGIN
-        ? process.env.ORIGIN
-        : "http://localhost:18000"
-  }/previews/${websiteOverview.id}/`;
-
-  const websiteProdUrl = dev
-    ? `http://localhost:18000/${websiteOverview.domain_prefix?.prefix ?? websiteOverview.id}/`
-    : process.env.ORIGIN
-      ? process.env.ORIGIN.replace(
-          "//",
-          `//${websiteOverview.domain_prefix?.prefix ?? websiteOverview.id}.`
-        )
-      : `http://localhost:18000/${websiteOverview.domain_prefix?.prefix ?? websiteOverview.id}/`;
+  const { permissionLevel } = await parent();
 
   return {
     websiteOverview,
     websitePreviewUrl,
-    websiteProdUrl
+    websiteProdUrl,
+    permissionLevel
  };
 };
 
@@ -67,7 +53,7 @@
     )
   ).data;
 
-  generateStaticFiles(websiteOverview, false);
+  await generateStaticFiles(websiteOverview, false);
 
   return await apiRequest(
     fetch,
@@ -156,6 +142,23 @@
 };
 
 const generateStaticFiles = async (websiteData: WebsiteOverview, isPreview = true) => {
+  const websitePreviewUrl = `${
+    dev
+      ? "http://localhost:18000"
+      : process.env.ORIGIN
+        ? process.env.ORIGIN
+        : "http://localhost:18000"
+  }/previews/${websiteData.id}/`;
+
+  const websiteProdUrl = dev
+    ? `http://localhost:18000/${websiteData.domain_prefix?.prefix ?? websiteData.id}/`
+    : process.env.ORIGIN
+      ? process.env.ORIGIN.replace(
+          "//",
+          `//${websiteData.domain_prefix?.prefix ?? websiteData.id}.`
+        )
+      : `http://localhost:18000/${websiteData.domain_prefix?.prefix ?? websiteData.id}/`;
+
   const fileContents = (head: string, body: string) => {
     return `
@@ -173,7 +176,8 @@ const generateStaticFiles = async (websiteData: WebsiteOverview, isPreview = tru
     props: {
       websiteOverview: websiteData,
       apiUrl: API_BASE_PREFIX,
-      isLegalPage: false
+      isLegalPage: false,
+      websiteUrl: isPreview ? websitePreviewUrl : websiteProdUrl
     }
   });
 
@@ -202,7 +206,8 @@ const generateStaticFiles = async (websiteData: WebsiteOverview, isPreview = tru
     props: {
       websiteOverview: websiteData,
       article,
-      apiUrl: API_BASE_PREFIX
+      apiUrl: API_BASE_PREFIX,
+      websiteUrl: isPreview ? websitePreviewUrl : websiteProdUrl
     }
   });
 
@@ -217,13 +222,17 @@ const generateStaticFiles = async (websiteData: WebsiteOverview, isPreview = tru
     props: {
       websiteOverview: websiteData,
       apiUrl: API_BASE_PREFIX,
-      isLegalPage: true
+      isLegalPage: true,
+      websiteUrl: isPreview ? websitePreviewUrl : websiteProdUrl
     }
   });
 
   await writeFile(join(uploadDir, "legal-information.html"), fileContents(head, body));
 }
 
+  const variableStyles = await readFile(`${process.cwd()}/template-styles/variables.css`, {
+    encoding: "utf-8"
+  });
   const commonStyles = await readFile(`${process.cwd()}/template-styles/common-styles.css`, {
     encoding: "utf-8"
   });
@@ -246,22 +255,58 @@ const generateStaticFiles = async (websiteData: WebsiteOverview, isPreview = tru
   } = hexToHSL(websiteData.settings.background_color_light_theme);
 
   await writeFile(
-    join(uploadDir, "styles.css"),
-    commonStyles
-      .concat(specificStyles)
-      .replace(/(?<=\/\* BACKGROUND_COLOR_DARK_THEME_H \*\/\s*).*(?=;)/, ` ${hDark}`)
-      .replace(/(?<=\/\* BACKGROUND_COLOR_DARK_THEME_S \*\/\s*).*(?=;)/, ` ${sDark}%`)
-      .replace(/(?<=\/\* BACKGROUND_COLOR_DARK_THEME_L \*\/\s*).*(?=;)/, ` ${lDark}%`)
-      .replace(/(?<=\/\* BACKGROUND_COLOR_LIGHT_THEME_H \*\/\s*).*(?=;)/, ` ${hLight}`)
-      .replace(/(?<=\/\* BACKGROUND_COLOR_LIGHT_THEME_S \*\/\s*).*(?=;)/, ` ${sLight}%`)
-      .replace(/(?<=\/\* BACKGROUND_COLOR_LIGHT_THEME_L \*\/\s*).*(?=;)/, ` ${lLight}%`)
-      .replace(
-        /(?<=\/\* ACCENT_COLOR_DARK_THEME \*\/\s*).*(?=;)/,
-        ` ${websiteData.settings.accent_color_dark_theme}`
+    join(uploadDir, "variables.css"),
+    variableStyles
+      .replaceAll(
+        /\/\* BACKGROUND_COLOR_DARK_THEME_H \*\/\s*.*?;/g,
+        `/* BACKGROUND_COLOR_DARK_THEME_H */ ${hDark};`
       )
-      .replace(
-        /(?<=\/\* ACCENT_COLOR_LIGHT_THEME \*\/\s*).*(?=;)/,
-        ` ${websiteData.settings.accent_color_light_theme}`
+      .replaceAll(
+        /\/\* BACKGROUND_COLOR_DARK_THEME_S \*\/\s*.*?;/g,
+        `/* BACKGROUND_COLOR_DARK_THEME_S */ ${sDark}%;`
+      )
+      .replaceAll(
+        /\/\* BACKGROUND_COLOR_DARK_THEME_L \*\/\s*.*?;/g,
+        `/* BACKGROUND_COLOR_DARK_THEME_L */ ${lDark}%;`
+      )
+      .replaceAll(
+        /\/\* BACKGROUND_COLOR_LIGHT_THEME_H \*\/\s*.*?;/g,
+        `/* BACKGROUND_COLOR_LIGHT_THEME_H */ ${hLight};`
+      )
+      .replaceAll(
+        /\/\* BACKGROUND_COLOR_LIGHT_THEME_S \*\/\s*.*?;/g,
+        `/* BACKGROUND_COLOR_LIGHT_THEME_S */ ${sLight}%;`
+      )
+      .replaceAll(
+        /\/\* BACKGROUND_COLOR_LIGHT_THEME_L \*\/\s*.*?;/g,
+        `/* BACKGROUND_COLOR_LIGHT_THEME_L */ ${lLight}%;`
+      )
+      .replaceAll(
+        /\/\* ACCENT_COLOR_DARK_THEME \*\/\s*.*?;/g,
+        `/* ACCENT_COLOR_DARK_THEME */ ${websiteData.settings.accent_color_dark_theme};`
+      )
+      .replaceAll(
+        /\/\* ACCENT_COLOR_LIGHT_THEME \*\/\s*.*?;/g,
+        `/* ACCENT_COLOR_LIGHT_THEME */ ${websiteData.settings.accent_color_light_theme};`
       )
   );
+  await writeFile(join(uploadDir, "common.css"), commonStyles);
+  await writeFile(join(uploadDir, "scoped.css"), specificStyles);
+
+  await setPermissions(isPreview ? join(uploadDir, "../") : uploadDir);
+
+  return { websitePreviewUrl, websiteProdUrl };
+};
+
+const setPermissions = async (dir: string) => {
+  await chmod(dir, 0o777);
+  const entries = await readdir(dir, { withFileTypes: true });
+  for (const entry of entries) {
+    const fullPath = join(dir, entry.name);
+    if (entry.isDirectory()) {
+      await setPermissions(fullPath);
+    } else {
+      await chmod(fullPath, 0o777);
+    }
+  }
 };
diff --git a/web-app/src/routes/(authenticated)/website/[websiteId]/publish/+page.svelte b/web-app/src/routes/(authenticated)/website/[websiteId]/publish/+page.svelte
index 51d231f..27f545f 100644
--- a/web-app/src/routes/(authenticated)/website/[websiteId]/publish/+page.svelte
+++ b/web-app/src/routes/(authenticated)/website/[websiteId]/publish/+page.svelte
@@ -36,7 +36,9 @@
 be published on the Internet.
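About the two URLs `generateStaticFiles` now returns: previews are always served from `/previews/<website id>/` under the configured ORIGIN (or the local static server in development), while the production URL injects the chosen domain prefix, falling back to the website id, as a subdomain by rewriting the `//` right after the scheme. A quick illustration with made-up values, mirroring the replace("//", ...) call above:

// Assumed example values; only the rewrite itself is taken from the diff.
const ORIGIN = "https://archtika.example.com";
const prefix = "handbook";

const websiteProdUrl = ORIGIN.replace("//", `//${prefix}.`);
// -> "https://handbook.archtika.example.com"

const websiteId = "<website-id>"; // placeholder for websiteOverview.id
const websitePreviewUrl = `${ORIGIN}/previews/${websiteId}/`;

The recursive `setPermissions` pass afterwards chmods every generated file and directory to 0o777, a deliberately broad setting, presumably so that a separately running web-server user can read and serve the published output.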

      - +
      @@ -46,8 +48,7 @@ Publication status

      - Your website is published at: -
      + Your website is published at:
      {data.websiteProdUrl}

      @@ -74,7 +75,9 @@ required /> - + {#if data.websiteOverview.domain_prefix?.prefix} @@ -88,7 +91,9 @@ Caution! This action will remove the domain prefix and reset it to its initial value.

      - +
      {/if} diff --git a/web-app/src/routes/+layout.svelte b/web-app/src/routes/+layout.svelte index 5dac755..05597ef 100644 --- a/web-app/src/routes/+layout.svelte +++ b/web-app/src/routes/+layout.svelte @@ -1,4 +1,5 @@