From bd5a92f4ad854dc010bd4ddedd85bf4bfb18bffc Mon Sep 17 00:00:00 2001 From: Garry Hill Date: Mon, 19 Feb 2024 11:22:32 +0000 Subject: [PATCH] Manage global and per-user permissions state --- .changeset/gentle-ducks-kneel.md | 5 + .../src/_generated/protocol/satellite.ts | 280 ++-- .../electric/lib/electric/ddlx/command.ex | 86 +- .../lib/electric/ddlx/command/assign.ex | 25 +- .../lib/electric/ddlx/command/grant.ex | 4 +- .../lib/electric/ddlx/command/revoke.ex | 4 +- .../lib/electric/ddlx/command/sqlite.ex | 2 +- .../lib/electric/ddlx/command/unassign.ex | 23 +- .../lib/electric/postgres/extension.ex | 23 +- .../migrations/20230814170745_electric_ddl.ex | 77 - .../ddlx_init.sql.eex | 663 -------- .../20240212161153_ddlx_commands.ex | 12 - .../20240214131615_permissions_state.ex | 89 ++ .../postgres/extension/permissions.ex | 149 ++ .../postgres/extension/permissions_loader.ex | 6 + .../postgres/extension/schema_cache.ex | 80 +- .../postgres/extension/schema_loader.ex | 78 +- .../extension/schema_loader/epgsql.ex | 76 +- .../extension/schema_loader/version.ex | 15 +- .../lib/electric/postgres/proxy/handler.ex | 2 +- .../lib/electric/replication/changes.ex | 31 + .../postgres/migration_consumer.ex | 82 +- .../lib/electric/satellite/permissions.ex | 143 +- .../satellite/permissions/consumer.ex | 345 +++++ .../electric/satellite/permissions/graph.ex | 18 - .../electric/satellite/permissions/role.ex | 2 +- .../electric/satellite/permissions/trigger.ex | 239 ++- .../satellite/permissions/write_buffer.ex | 27 +- .../electric/satellite/protobuf_messages.ex | 812 ++++++++-- .../lib/electric/satellite/protocol.ex | 29 + .../ddlx/assign.sql.eex | 399 ----- .../sql_function_templates/ddlx/grant.sql.eex | 24 - .../ddlx/unassign.sql.eex | 45 - .../find_fk_to_table.sql.eex | 47 - .../electric/src/electric_ddlx_parser.yrl | 13 +- .../test/electric/ddlx/command_test.exs | 31 +- .../test/electric/ddlx/ddlx_commands_test.exs | 165 -- .../test/electric/ddlx/ddlx_postgres_test.exs | 
1159 -------------- .../electric/ddlx/parser/tokenizer_test.exs | 15 + .../test/electric/ddlx/parser_test.exs | 105 +- .../extension/schema_loader/epgsql_test.exs | 269 ++++ .../postgres/migration_consumer_test.exs | 725 ++++++--- .../satellite/permissions/consumer_test.exs | 1334 +++++++++++++++++ .../satellite/permissions/join_table_test.exs | 78 +- .../satellite/permissions/transient_test.exs | 12 +- .../satellite/permissions/trigger_test.exs | 139 +- .../electric/satellite/permissions_test.exs | 235 ++- .../test/support/mock_schema_loader.ex | 265 +++- .../test/support/permissions_helpers.ex | 83 +- .../06.02_permissions_change_propagation.lux | 96 ++ e2e/tests/compose.yaml | 1 + protocol/satellite.proto | 32 +- 52 files changed, 5211 insertions(+), 3488 deletions(-) create mode 100644 .changeset/gentle-ducks-kneel.md delete mode 100644 components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl.ex delete mode 100644 components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl/ddlx_init.sql.eex create mode 100644 components/electric/lib/electric/postgres/extension/migrations/20240214131615_permissions_state.ex create mode 100644 components/electric/lib/electric/postgres/extension/permissions.ex create mode 100644 components/electric/lib/electric/postgres/extension/permissions_loader.ex create mode 100644 components/electric/lib/electric/satellite/permissions/consumer.ex delete mode 100644 components/electric/priv/sql_function_templates/ddlx/assign.sql.eex delete mode 100644 components/electric/priv/sql_function_templates/ddlx/grant.sql.eex delete mode 100644 components/electric/priv/sql_function_templates/ddlx/unassign.sql.eex delete mode 100644 components/electric/priv/sql_function_templates/find_fk_to_table.sql.eex delete mode 100644 components/electric/test/electric/ddlx/ddlx_commands_test.exs delete mode 100644 components/electric/test/electric/ddlx/ddlx_postgres_test.exs create mode 100644 
components/electric/test/electric/postgres/extension/schema_loader/epgsql_test.exs create mode 100644 components/electric/test/electric/satellite/permissions/consumer_test.exs create mode 100644 e2e/tests/06.02_permissions_change_propagation.lux diff --git a/.changeset/gentle-ducks-kneel.md b/.changeset/gentle-ducks-kneel.md new file mode 100644 index 0000000000..91a3419305 --- /dev/null +++ b/.changeset/gentle-ducks-kneel.md @@ -0,0 +1,5 @@ +--- +"@core/electric": patch +--- + +Add management of immutable global and per-user permissions state diff --git a/clients/typescript/src/_generated/protocol/satellite.ts b/clients/typescript/src/_generated/protocol/satellite.ts index 306713f232..d458a87d6c 100644 --- a/clients/typescript/src/_generated/protocol/satellite.ts +++ b/clients/typescript/src/_generated/protocol/satellite.ts @@ -593,9 +593,14 @@ export interface SatShapeDataEnd { */ export interface SatPerms { $type: "Electric.Satellite.SatPerms"; + /** + * this id is the id of the user permissions, this struct is the user + * permissions fused with the global permissions at that point in time + */ id: Long; + userId: string; rules: SatPerms_Rules | undefined; - roles: SatPerms_Roles | undefined; + roles: SatPerms_Role[]; } export enum SatPerms_Privilege { @@ -632,7 +637,7 @@ export interface SatPerms_Path { export interface SatPerms_Scope { $type: "Electric.Satellite.SatPerms.Scope"; table: SatPerms_Table | undefined; - id: string; + id: string[]; } export interface SatPerms_RoleName { @@ -660,6 +665,7 @@ export interface SatPerms_Grant { export interface SatPerms_Revoke { $type: "Electric.Satellite.SatPerms.Revoke"; + id: string; table: SatPerms_Table | undefined; role: SatPerms_RoleName | undefined; privilege: SatPerms_Privilege; @@ -680,6 +686,7 @@ export interface SatPerms_Assign { export interface SatPerms_Unassign { $type: "Electric.Satellite.SatPerms.Unassign"; + id: string; table: SatPerms_Table | undefined; userColumn?: string | undefined; roleColumn?: 
string | undefined; @@ -694,34 +701,39 @@ export interface SatPerms_Sqlite { export interface SatPerms_Role { $type: "Electric.Satellite.SatPerms.Role"; - id: string; + rowId: string[]; role: string; userId: string; assignId: string; scope?: SatPerms_Scope | undefined; } +export interface SatPerms_DDLX { + $type: "Electric.Satellite.SatPerms.DDLX"; + grants: SatPerms_Grant[]; + revokes: SatPerms_Revoke[]; + assigns: SatPerms_Assign[]; + unassigns: SatPerms_Unassign[]; + sqlite: SatPerms_Sqlite[]; +} + /** * split the rules and roles info into distinct messages so they can be * serialized separately */ export interface SatPerms_Rules { $type: "Electric.Satellite.SatPerms.Rules"; + id: Long; + parentId?: Long | undefined; grants: SatPerms_Grant[]; assigns: SatPerms_Assign[]; } -export interface SatPerms_DDLX { - $type: "Electric.Satellite.SatPerms.DDLX"; - grants: SatPerms_Grant[]; - revokes: SatPerms_Revoke[]; - assigns: SatPerms_Assign[]; - unassigns: SatPerms_Unassign[]; - sqlite: SatPerms_Sqlite[]; -} - export interface SatPerms_Roles { $type: "Electric.Satellite.SatPerms.Roles"; + id: Long; + parentId?: Long | undefined; + rulesId: Long; roles: SatPerms_Role[]; } @@ -3731,7 +3743,7 @@ export const SatShapeDataEnd = { messageTypeRegistry.set(SatShapeDataEnd.$type, SatShapeDataEnd); function createBaseSatPerms(): SatPerms { - return { $type: "Electric.Satellite.SatPerms", id: Long.ZERO, rules: undefined, roles: undefined }; + return { $type: "Electric.Satellite.SatPerms", id: Long.ZERO, userId: "", rules: undefined, roles: [] }; } export const SatPerms = { @@ -3741,11 +3753,14 @@ export const SatPerms = { if (!message.id.isZero()) { writer.uint32(8).int64(message.id); } + if (message.userId !== "") { + writer.uint32(18).string(message.userId); + } if (message.rules !== undefined) { SatPerms_Rules.encode(message.rules, writer.uint32(26).fork()).ldelim(); } - if (message.roles !== undefined) { - SatPerms_Roles.encode(message.roles, 
writer.uint32(34).fork()).ldelim(); + for (const v of message.roles) { + SatPerms_Role.encode(v!, writer.uint32(34).fork()).ldelim(); } return writer; }, @@ -3764,6 +3779,13 @@ export const SatPerms = { message.id = reader.int64() as Long; continue; + case 2: + if (tag !== 18) { + break; + } + + message.userId = reader.string(); + continue; case 3: if (tag !== 26) { break; @@ -3776,7 +3798,7 @@ export const SatPerms = { break; } - message.roles = SatPerms_Roles.decode(reader, reader.uint32()); + message.roles.push(SatPerms_Role.decode(reader, reader.uint32())); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -3794,12 +3816,11 @@ export const SatPerms = { fromPartial, I>>(object: I): SatPerms { const message = createBaseSatPerms(); message.id = (object.id !== undefined && object.id !== null) ? Long.fromValue(object.id) : Long.ZERO; + message.userId = object.userId ?? ""; message.rules = (object.rules !== undefined && object.rules !== null) ? SatPerms_Rules.fromPartial(object.rules) : undefined; - message.roles = (object.roles !== undefined && object.roles !== null) - ? 
SatPerms_Roles.fromPartial(object.roles) - : undefined; + message.roles = object.roles?.map((e) => SatPerms_Role.fromPartial(e)) || []; return message; }, }; @@ -3981,7 +4002,7 @@ export const SatPerms_Path = { messageTypeRegistry.set(SatPerms_Path.$type, SatPerms_Path); function createBaseSatPerms_Scope(): SatPerms_Scope { - return { $type: "Electric.Satellite.SatPerms.Scope", table: undefined, id: "" }; + return { $type: "Electric.Satellite.SatPerms.Scope", table: undefined, id: [] }; } export const SatPerms_Scope = { @@ -3991,8 +4012,8 @@ export const SatPerms_Scope = { if (message.table !== undefined) { SatPerms_Table.encode(message.table, writer.uint32(10).fork()).ldelim(); } - if (message.id !== "") { - writer.uint32(18).string(message.id); + for (const v of message.id) { + writer.uint32(18).string(v!); } return writer; }, @@ -4016,7 +4037,7 @@ export const SatPerms_Scope = { break; } - message.id = reader.string(); + message.id.push(reader.string()); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -4036,7 +4057,7 @@ export const SatPerms_Scope = { message.table = (object.table !== undefined && object.table !== null) ? SatPerms_Table.fromPartial(object.table) : undefined; - message.id = object.id ?? 
""; + message.id = object.id?.map((e) => e) || []; return message; }, }; @@ -4304,6 +4325,7 @@ messageTypeRegistry.set(SatPerms_Grant.$type, SatPerms_Grant); function createBaseSatPerms_Revoke(): SatPerms_Revoke { return { $type: "Electric.Satellite.SatPerms.Revoke", + id: "", table: undefined, role: undefined, privilege: 0, @@ -4316,6 +4338,9 @@ export const SatPerms_Revoke = { $type: "Electric.Satellite.SatPerms.Revoke" as const, encode(message: SatPerms_Revoke, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } if (message.table !== undefined) { SatPerms_Table.encode(message.table, writer.uint32(18).fork()).ldelim(); } @@ -4341,6 +4366,13 @@ export const SatPerms_Revoke = { while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; case 2: if (tag !== 18) { break; @@ -4391,6 +4423,7 @@ export const SatPerms_Revoke = { fromPartial, I>>(object: I): SatPerms_Revoke { const message = createBaseSatPerms_Revoke(); + message.id = object.id ?? ""; message.table = (object.table !== undefined && object.table !== null) ? 
SatPerms_Table.fromPartial(object.table) : undefined; @@ -4542,6 +4575,7 @@ messageTypeRegistry.set(SatPerms_Assign.$type, SatPerms_Assign); function createBaseSatPerms_Unassign(): SatPerms_Unassign { return { $type: "Electric.Satellite.SatPerms.Unassign", + id: "", table: undefined, userColumn: undefined, roleColumn: undefined, @@ -4554,6 +4588,9 @@ export const SatPerms_Unassign = { $type: "Electric.Satellite.SatPerms.Unassign" as const, encode(message: SatPerms_Unassign, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } if (message.table !== undefined) { SatPerms_Table.encode(message.table, writer.uint32(18).fork()).ldelim(); } @@ -4579,6 +4616,13 @@ export const SatPerms_Unassign = { while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; case 2: if (tag !== 18) { break; @@ -4629,6 +4673,7 @@ export const SatPerms_Unassign = { fromPartial, I>>(object: I): SatPerms_Unassign { const message = createBaseSatPerms_Unassign(); + message.id = object.id ?? ""; message.table = (object.table !== undefined && object.table !== null) ? 
SatPerms_Table.fromPartial(object.table) : undefined; @@ -4695,15 +4740,15 @@ export const SatPerms_Sqlite = { messageTypeRegistry.set(SatPerms_Sqlite.$type, SatPerms_Sqlite); function createBaseSatPerms_Role(): SatPerms_Role { - return { $type: "Electric.Satellite.SatPerms.Role", id: "", role: "", userId: "", assignId: "", scope: undefined }; + return { $type: "Electric.Satellite.SatPerms.Role", rowId: [], role: "", userId: "", assignId: "", scope: undefined }; } export const SatPerms_Role = { $type: "Electric.Satellite.SatPerms.Role" as const, encode(message: SatPerms_Role, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.id !== "") { - writer.uint32(10).string(message.id); + for (const v of message.rowId) { + writer.uint32(10).string(v!); } if (message.role !== "") { writer.uint32(18).string(message.role); @@ -4732,7 +4777,7 @@ export const SatPerms_Role = { break; } - message.id = reader.string(); + message.rowId.push(reader.string()); continue; case 2: if (tag !== 18) { @@ -4777,7 +4822,7 @@ export const SatPerms_Role = { fromPartial, I>>(object: I): SatPerms_Role { const message = createBaseSatPerms_Role(); - message.id = object.id ?? ""; + message.rowId = object.rowId?.map((e) => e) || []; message.role = object.role ?? ""; message.userId = object.userId ?? ""; message.assignId = object.assignId ?? 
""; @@ -4790,27 +4835,36 @@ export const SatPerms_Role = { messageTypeRegistry.set(SatPerms_Role.$type, SatPerms_Role); -function createBaseSatPerms_Rules(): SatPerms_Rules { - return { $type: "Electric.Satellite.SatPerms.Rules", grants: [], assigns: [] }; +function createBaseSatPerms_DDLX(): SatPerms_DDLX { + return { $type: "Electric.Satellite.SatPerms.DDLX", grants: [], revokes: [], assigns: [], unassigns: [], sqlite: [] }; } -export const SatPerms_Rules = { - $type: "Electric.Satellite.SatPerms.Rules" as const, +export const SatPerms_DDLX = { + $type: "Electric.Satellite.SatPerms.DDLX" as const, - encode(message: SatPerms_Rules, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode(message: SatPerms_DDLX, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { for (const v of message.grants) { SatPerms_Grant.encode(v!, writer.uint32(10).fork()).ldelim(); } + for (const v of message.revokes) { + SatPerms_Revoke.encode(v!, writer.uint32(18).fork()).ldelim(); + } for (const v of message.assigns) { - SatPerms_Assign.encode(v!, writer.uint32(18).fork()).ldelim(); + SatPerms_Assign.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.unassigns) { + SatPerms_Unassign.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.sqlite) { + SatPerms_Sqlite.encode(v!, writer.uint32(42).fork()).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SatPerms_Rules { + decode(input: _m0.Reader | Uint8Array, length?: number): SatPerms_DDLX { const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseSatPerms_Rules(); + const message = createBaseSatPerms_DDLX(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { @@ -4826,8 +4880,29 @@ export const SatPerms_Rules = { break; } + message.revokes.push(SatPerms_Revoke.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + message.assigns.push(SatPerms_Assign.decode(reader, reader.uint32())); continue; + case 4: + if (tag !== 34) { + break; + } + + message.unassigns.push(SatPerms_Unassign.decode(reader, reader.uint32())); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.sqlite.push(SatPerms_Sqlite.decode(reader, reader.uint32())); + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -4837,87 +4912,80 @@ export const SatPerms_Rules = { return message; }, - create, I>>(base?: I): SatPerms_Rules { - return SatPerms_Rules.fromPartial(base ?? {}); + create, I>>(base?: I): SatPerms_DDLX { + return SatPerms_DDLX.fromPartial(base ?? 
{}); }, - fromPartial, I>>(object: I): SatPerms_Rules { - const message = createBaseSatPerms_Rules(); + fromPartial, I>>(object: I): SatPerms_DDLX { + const message = createBaseSatPerms_DDLX(); message.grants = object.grants?.map((e) => SatPerms_Grant.fromPartial(e)) || []; + message.revokes = object.revokes?.map((e) => SatPerms_Revoke.fromPartial(e)) || []; message.assigns = object.assigns?.map((e) => SatPerms_Assign.fromPartial(e)) || []; + message.unassigns = object.unassigns?.map((e) => SatPerms_Unassign.fromPartial(e)) || []; + message.sqlite = object.sqlite?.map((e) => SatPerms_Sqlite.fromPartial(e)) || []; return message; }, }; -messageTypeRegistry.set(SatPerms_Rules.$type, SatPerms_Rules); +messageTypeRegistry.set(SatPerms_DDLX.$type, SatPerms_DDLX); -function createBaseSatPerms_DDLX(): SatPerms_DDLX { - return { $type: "Electric.Satellite.SatPerms.DDLX", grants: [], revokes: [], assigns: [], unassigns: [], sqlite: [] }; +function createBaseSatPerms_Rules(): SatPerms_Rules { + return { $type: "Electric.Satellite.SatPerms.Rules", id: Long.UZERO, parentId: undefined, grants: [], assigns: [] }; } -export const SatPerms_DDLX = { - $type: "Electric.Satellite.SatPerms.DDLX" as const, +export const SatPerms_Rules = { + $type: "Electric.Satellite.SatPerms.Rules" as const, - encode(message: SatPerms_DDLX, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - for (const v of message.grants) { - SatPerms_Grant.encode(v!, writer.uint32(10).fork()).ldelim(); - } - for (const v of message.revokes) { - SatPerms_Revoke.encode(v!, writer.uint32(18).fork()).ldelim(); + encode(message: SatPerms_Rules, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.id.isZero()) { + writer.uint32(8).uint64(message.id); } - for (const v of message.assigns) { - SatPerms_Assign.encode(v!, writer.uint32(26).fork()).ldelim(); + if (message.parentId !== undefined) { + writer.uint32(16).uint64(message.parentId); } - for (const v of message.unassigns) { - 
SatPerms_Unassign.encode(v!, writer.uint32(34).fork()).ldelim(); + for (const v of message.grants) { + SatPerms_Grant.encode(v!, writer.uint32(26).fork()).ldelim(); } - for (const v of message.sqlite) { - SatPerms_Sqlite.encode(v!, writer.uint32(42).fork()).ldelim(); + for (const v of message.assigns) { + SatPerms_Assign.encode(v!, writer.uint32(34).fork()).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SatPerms_DDLX { + decode(input: _m0.Reader | Uint8Array, length?: number): SatPerms_Rules { const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseSatPerms_DDLX(); + const message = createBaseSatPerms_Rules(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { case 1: - if (tag !== 10) { + if (tag !== 8) { break; } - message.grants.push(SatPerms_Grant.decode(reader, reader.uint32())); + message.id = reader.uint64() as Long; continue; case 2: - if (tag !== 18) { + if (tag !== 16) { break; } - message.revokes.push(SatPerms_Revoke.decode(reader, reader.uint32())); + message.parentId = reader.uint64() as Long; continue; case 3: if (tag !== 26) { break; } - message.assigns.push(SatPerms_Assign.decode(reader, reader.uint32())); + message.grants.push(SatPerms_Grant.decode(reader, reader.uint32())); continue; case 4: if (tag !== 34) { break; } - message.unassigns.push(SatPerms_Unassign.decode(reader, reader.uint32())); - continue; - case 5: - if (tag !== 42) { - break; - } - - message.sqlite.push(SatPerms_Sqlite.decode(reader, reader.uint32())); + message.assigns.push(SatPerms_Assign.decode(reader, reader.uint32())); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -4928,33 +4996,49 @@ export const SatPerms_DDLX = { return message; }, - create, I>>(base?: I): SatPerms_DDLX { - return SatPerms_DDLX.fromPartial(base ?? 
{}); + create, I>>(base?: I): SatPerms_Rules { + return SatPerms_Rules.fromPartial(base ?? {}); }, - fromPartial, I>>(object: I): SatPerms_DDLX { - const message = createBaseSatPerms_DDLX(); + fromPartial, I>>(object: I): SatPerms_Rules { + const message = createBaseSatPerms_Rules(); + message.id = (object.id !== undefined && object.id !== null) ? Long.fromValue(object.id) : Long.UZERO; + message.parentId = (object.parentId !== undefined && object.parentId !== null) + ? Long.fromValue(object.parentId) + : undefined; message.grants = object.grants?.map((e) => SatPerms_Grant.fromPartial(e)) || []; - message.revokes = object.revokes?.map((e) => SatPerms_Revoke.fromPartial(e)) || []; message.assigns = object.assigns?.map((e) => SatPerms_Assign.fromPartial(e)) || []; - message.unassigns = object.unassigns?.map((e) => SatPerms_Unassign.fromPartial(e)) || []; - message.sqlite = object.sqlite?.map((e) => SatPerms_Sqlite.fromPartial(e)) || []; return message; }, }; -messageTypeRegistry.set(SatPerms_DDLX.$type, SatPerms_DDLX); +messageTypeRegistry.set(SatPerms_Rules.$type, SatPerms_Rules); function createBaseSatPerms_Roles(): SatPerms_Roles { - return { $type: "Electric.Satellite.SatPerms.Roles", roles: [] }; + return { + $type: "Electric.Satellite.SatPerms.Roles", + id: Long.UZERO, + parentId: undefined, + rulesId: Long.UZERO, + roles: [], + }; } export const SatPerms_Roles = { $type: "Electric.Satellite.SatPerms.Roles" as const, encode(message: SatPerms_Roles, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.id.isZero()) { + writer.uint32(8).uint64(message.id); + } + if (message.parentId !== undefined) { + writer.uint32(16).uint64(message.parentId); + } + if (!message.rulesId.isZero()) { + writer.uint32(24).uint64(message.rulesId); + } for (const v of message.roles) { - SatPerms_Role.encode(v!, writer.uint32(18).fork()).ldelim(); + SatPerms_Role.encode(v!, writer.uint32(34).fork()).ldelim(); } return writer; }, @@ -4966,8 +5050,29 @@ export const 
SatPerms_Roles = { while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.id = reader.uint64() as Long; + continue; case 2: - if (tag !== 18) { + if (tag !== 16) { + break; + } + + message.parentId = reader.uint64() as Long; + continue; + case 3: + if (tag !== 24) { + break; + } + + message.rulesId = reader.uint64() as Long; + continue; + case 4: + if (tag !== 34) { break; } @@ -4988,6 +5093,13 @@ export const SatPerms_Roles = { fromPartial, I>>(object: I): SatPerms_Roles { const message = createBaseSatPerms_Roles(); + message.id = (object.id !== undefined && object.id !== null) ? Long.fromValue(object.id) : Long.UZERO; + message.parentId = (object.parentId !== undefined && object.parentId !== null) + ? Long.fromValue(object.parentId) + : undefined; + message.rulesId = (object.rulesId !== undefined && object.rulesId !== null) + ? Long.fromValue(object.rulesId) + : Long.UZERO; message.roles = object.roles?.map((e) => SatPerms_Role.fromPartial(e)) || []; return message; }, diff --git a/components/electric/lib/electric/ddlx/command.ex b/components/electric/lib/electric/ddlx/command.ex index c3a998ee5e..7c32675ac0 100644 --- a/components/electric/lib/electric/ddlx/command.ex +++ b/components/electric/lib/electric/ddlx/command.ex @@ -18,7 +18,33 @@ defmodule Electric.DDLX.Command do tables: [Electric.Postgres.relation()] } - def tag(%__MODULE__{tag: tag}), do: tag + def tag(%__MODULE__{tag: tag}) do + tag + end + + @perms_with_ids [:assigns, :unassigns, :grants, :revokes] + @perms_without_ids [:sqlite] + + def ddlx(cmds) do + ddlx = + Enum.reduce(@perms_with_ids, %SatPerms.DDLX{}, fn type, ddlx -> + Map.update!(ddlx, type, fn [] -> + cmds + |> Keyword.get(type, []) + |> Enum.map(&put_id/1) + end) + end) + + Enum.reduce(@perms_without_ids, ddlx, &Map.put(&2, &1, Keyword.get(cmds, &1, []))) + end + + def put_id(%{id: id} = cmd) when is_struct(cmd) and id in ["", nil] do + Map.put(cmd, :id, 
command_id(cmd)) + end + + def put_id(cmd) when is_struct(cmd) do + cmd + end def pg_sql(cmd) do PgSQL.to_sql(cmd) @@ -108,6 +134,17 @@ defmodule Electric.DDLX.Command do ]) end + def command_id(%SatPerms.Sqlite{} = sqlite) do + hash([ + sqlite.stmt + ]) + end + + # hash the given terms in the struct together. `SHA1` is chosen because it is smaller in terms + # of bytes, rather than for any cryptographic reason. Since the hash/id is used in the naming of + # triggers and tables within pg, a bigger hash, such as `SHA256`, would use too many of the 64 + # available bytes for these pg objects. This is the same reason to use encode32 rather than + # encode16 -- it just eats fewer of the available characters. defp hash(terms) do terms |> Enum.map(&fingerprint/1) @@ -185,54 +222,19 @@ defimpl Electric.DDLX.Command.PgSQL, for: SatPerms.Revoke do end defimpl Electric.DDLX.Command.PgSQL, for: SatPerms.Assign do - import Electric.DDLX.Command.Common - - def to_sql(%SatPerms.Assign{} = assign) do - id = Electric.DDLX.Command.command_id(assign) - - [ - """ - CALL electric.assign( - assignment_id => #{sql_repr(id)}, - assign_table_full_name => #{sql_repr(assign.table)}, - scope => #{sql_repr(assign.scope)}, - user_column_name => #{sql_repr(assign.user_column)}, - role_name_string => #{sql_repr(assign.role_name)}, - role_column_name => #{sql_repr(assign.role_column)}, - if_fn => #{sql_repr(assign.if)} - ); - """ - ] + def to_sql(%SatPerms.Assign{} = _assign) do + [] end end defimpl Electric.DDLX.Command.PgSQL, for: SatPerms.Unassign do - import Electric.DDLX.Command.Common - - def to_sql(%SatPerms.Unassign{} = unassign) do - id = Electric.DDLX.Command.command_id(unassign) - - [ - """ - CALL electric.unassign( - assignment_id => #{sql_repr(id)}, - assign_table_full_name => #{sql_repr(unassign.table)}, - scope => #{sql_repr(unassign.scope)}, - user_column_name => #{sql_repr(unassign.user_column)}, - role_name_string => #{sql_repr(unassign.role_name)}, - role_column_name => 
#{sql_repr(unassign.role_column)} - ); - """ - ] + def to_sql(%SatPerms.Unassign{} = _unassign) do + [] end end defimpl Electric.DDLX.Command.PgSQL, for: SatPerms.Sqlite do - def to_sql(%SatPerms.Sqlite{stmt: stmt}) when is_binary(stmt) do - [ - """ - CALL electric.sqlite(sql => $sqlite$#{stmt}$sqlite$); - """ - ] + def to_sql(%SatPerms.Sqlite{} = _sqlite) do + [] end end diff --git a/components/electric/lib/electric/ddlx/command/assign.ex b/components/electric/lib/electric/ddlx/command/assign.ex index acece4ac06..ffaccaa84b 100644 --- a/components/electric/lib/electric/ddlx/command/assign.ex +++ b/components/electric/lib/electric/ddlx/command/assign.ex @@ -22,18 +22,19 @@ defmodule Electric.DDLX.Command.Assign do {:ok, %Command{ - cmds: %SatPerms.DDLX{ - assigns: [ - %SatPerms.Assign{ - table: pb_table(attrs[:table_name]), - user_column: attrs[:user_column], - role_column: attrs[:role_column], - role_name: attrs[:role_name], - scope: pb_scope(attrs[:scope]), - if: attrs[:if_statement] - } - ] - }, + cmds: + Command.ddlx( + assigns: [ + %SatPerms.Assign{ + table: pb_table(attrs[:table_name]), + user_column: attrs[:user_column], + role_column: attrs[:role_column], + role_name: attrs[:role_name], + scope: pb_scope(attrs[:scope]), + if: attrs[:if_statement] + } + ] + ), stmt: ddlx, tables: [attrs[:table_name]], tag: "ELECTRIC ASSIGN" diff --git a/components/electric/lib/electric/ddlx/command/grant.ex b/components/electric/lib/electric/ddlx/command/grant.ex index 8b980fbf80..f14cf59dad 100644 --- a/components/electric/lib/electric/ddlx/command/grant.ex +++ b/components/electric/lib/electric/ddlx/command/grant.ex @@ -28,9 +28,7 @@ defmodule Electric.DDLX.Command.Grant do {:ok, %Command{ - cmds: %SatPerms.DDLX{ - grants: grants - }, + cmds: Command.ddlx(grants: grants), stmt: ddlx, tables: [{table_schema, table_name}], tag: "ELECTRIC GRANT" diff --git a/components/electric/lib/electric/ddlx/command/revoke.ex b/components/electric/lib/electric/ddlx/command/revoke.ex index 
b4cc60a59d..f5fbf9febc 100644 --- a/components/electric/lib/electric/ddlx/command/revoke.ex +++ b/components/electric/lib/electric/ddlx/command/revoke.ex @@ -23,9 +23,7 @@ defmodule Electric.DDLX.Command.Revoke do {:ok, %Command{ - cmds: %SatPerms.DDLX{ - revokes: revokes - }, + cmds: Command.ddlx(revokes: revokes), stmt: ddlx, tables: [{table_schema, table_name}], tag: "ELECTRIC REVOKE" diff --git a/components/electric/lib/electric/ddlx/command/sqlite.ex b/components/electric/lib/electric/ddlx/command/sqlite.ex index 1290c9fc6a..a8add55586 100644 --- a/components/electric/lib/electric/ddlx/command/sqlite.ex +++ b/components/electric/lib/electric/ddlx/command/sqlite.ex @@ -8,7 +8,7 @@ defmodule Electric.DDLX.Command.SQLite do with {:ok, stmt} <- fetch_attr(params, :statement) do {:ok, %Command{ - cmds: %SatPerms.DDLX{sqlite: [%SatPerms.Sqlite{stmt: stmt}]}, + cmds: Command.ddlx(sqlite: [%SatPerms.Sqlite{stmt: stmt}]), stmt: ddlx, tables: [], tag: "ELECTRIC SQLITE" diff --git a/components/electric/lib/electric/ddlx/command/unassign.ex b/components/electric/lib/electric/ddlx/command/unassign.ex index 6aa6eed57e..213b56a067 100644 --- a/components/electric/lib/electric/ddlx/command/unassign.ex +++ b/components/electric/lib/electric/ddlx/command/unassign.ex @@ -20,17 +20,18 @@ defmodule Electric.DDLX.Command.Unassign do {:ok, %Command{ - cmds: %SatPerms.DDLX{ - unassigns: [ - %SatPerms.Unassign{ - table: pb_table(attrs[:table_name]), - user_column: attrs[:user_column], - role_column: attrs[:role_column], - role_name: attrs[:role_name], - scope: pb_scope(attrs[:scope]) - } - ] - }, + cmds: + Command.ddlx( + unassigns: [ + %SatPerms.Unassign{ + table: pb_table(attrs[:table_name]), + user_column: attrs[:user_column], + role_column: attrs[:role_column], + role_name: attrs[:role_name], + scope: pb_scope(attrs[:scope]) + } + ] + ), stmt: ddlx, tables: [attrs[:table_name]], tag: "ELECTRIC UNASSIGN" diff --git a/components/electric/lib/electric/postgres/extension.ex 
b/components/electric/lib/electric/postgres/extension.ex index 2003b3119f..18b184402c 100644 --- a/components/electric/lib/electric/postgres/extension.ex +++ b/components/electric/lib/electric/postgres/extension.ex @@ -21,14 +21,13 @@ defmodule Electric.Postgres.Extension do @version_relation "migration_versions" @ddl_relation "ddl_commands" - @schema_relation "schema" @electrified_table_relation "electrified" @acked_client_lsn_relation "acknowledged_client_lsns" - @grants_relation "grants" - @roles_relation "roles" - @assignments_relation "assignments" + # permissions storage and management @ddlx_commands_relation "ddlx_commands" + @global_perms_relation "global_perms_state" + @user_perms_relation "user_perms_state" electric = &to_string([?", @schema, ?", ?., ?", &1, ?"]) @@ -40,10 +39,9 @@ defmodule Electric.Postgres.Extension do @transaction_marker_table electric.("transaction_marker") @acked_client_lsn_table electric.(@acked_client_lsn_relation) - @grants_table electric.(@grants_relation) - @roles_table electric.(@roles_relation) - @assignments_table electric.(@assignments_relation) @ddlx_table electric.(@ddlx_commands_relation) + @global_perms_table electric.(@global_perms_relation) + @user_perms_table electric.(@user_perms_relation) @all_schema_query ~s(SELECT "schema", "version", "migration_ddl" FROM #{@schema_table} ORDER BY "version" ASC) @current_schema_query ~s(SELECT "schema", "version" FROM #{@schema_table} ORDER BY "id" DESC LIMIT 1) @@ -110,14 +108,13 @@ defmodule Electric.Postgres.Extension do def transaction_marker_table, do: @transaction_marker_table def acked_client_lsn_table, do: @acked_client_lsn_table - def grants_table, do: @grants_table - def roles_table, do: @roles_table - def assignments_table, do: @assignments_table def ddlx_table, do: @ddlx_table + def global_perms_table, do: @global_perms_table + def user_perms_table, do: @user_perms_table def ddl_relation, do: {@schema, @ddl_relation} def version_relation, do: {@schema, 
@version_relation} - def schema_relation, do: {@schema, @schema_relation} + def ddlx_relation, do: {@schema, @ddlx_commands_relation} def acked_client_lsn_relation, do: {@schema, @acked_client_lsn_relation} def publication_name, do: @publication_name def slot_name, do: @slot_name @@ -315,7 +312,6 @@ defmodule Electric.Postgres.Extension do Migrations.Migration_20230605141256_ElectrifyFunction, Migrations.Migration_20230715000000_UtilitiesTable, Migrations.Migration_20230814170123_RenameDDLX, - Migrations.Migration_20230814170745_ElectricDDL, Migrations.Migration_20230829000000_AcknowledgedClientLsnsTable, Migrations.Migration_20230918115714_DDLCommandUniqueConstraint, Migrations.Migration_20230921161045_DropEventTriggers, @@ -327,7 +323,8 @@ defmodule Electric.Postgres.Extension do Migrations.Migration_20240110110200_DropUnusedFunctions, Migrations.Migration_20240205141200_ReinstallTriggerFunctionWriteCorrectMaxTag, Migrations.Migration_20240213160300_DropGenerateElectrifiedSqlFunction, - Migrations.Migration_20240212161153_DDLXCommands + Migrations.Migration_20240212161153_DDLXCommands, + Migrations.Migration_20240214131615_PermissionsState ] end diff --git a/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl.ex b/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl.ex deleted file mode 100644 index a635aad1e5..0000000000 --- a/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl.ex +++ /dev/null @@ -1,77 +0,0 @@ -defmodule Electric.Postgres.Extension.Migrations.Migration_20230814170745_ElectricDDL do - alias Electric.Postgres.Extension - - require EEx - - @behaviour Extension.Migration - - sql_file = Path.expand("20230814170745_electric_ddl/ddlx_init.sql.eex", __DIR__) - - @external_resource sql_file - - @impl true - def version, do: 2023_08_14_17_07_45 - - @impl true - def up(schema) do - grants_table = Extension.grants_table() - roles_table = 
Extension.roles_table() - assignments_table = Extension.assignments_table() - - ddlx_sql = ddlx_init_sql(schema, grants_table, roles_table, assignments_table) - - tables = [ - """ - CREATE TABLE IF NOT EXISTS #{grants_table} ( - privilege VARCHAR(20) NOT NULL, - on_table VARCHAR(64) NOT NULL, - role VARCHAR(64) NOT NULL, - column_name VARCHAR(64) NOT NULL, - scope VARCHAR(64) NOT NULL, - using_path TEXT, - check_fn TEXT, - CONSTRAINT grants_pkey PRIMARY KEY (privilege, on_table, role, scope, column_name) - ); - """, - """ - CREATE TABLE IF NOT EXISTS #{roles_table} ( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - user_id VARCHAR(256) NOT NULL, - scope_table VARCHAR(64), - scope_id VARCHAR(256) - ); - """, - """ - CREATE TABLE IF NOT EXISTS #{assignments_table} ( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - table_name VARCHAR(64) NOT NULL, - scope_table VARCHAR(64) NOT NULL, - user_column VARCHAR(64) NOT NULL, - role_name VARCHAR(64) NOT NULL, - role_column VARCHAR(64) NOT NULL, - if_fn TEXT, - CONSTRAINT unique_assign UNIQUE (table_name, scope_table, user_column, role_name, role_column) - ); - """ - ] - - publish_tables = - Enum.map( - [grants_table, roles_table, assignments_table], - &Extension.add_table_to_publication_sql/1 - ) - - tables ++ [ddlx_sql] ++ publish_tables - end - - @impl true - def down(_), do: [] - - EEx.function_from_file(:defp, :ddlx_init_sql, sql_file, [ - :schema, - :grants_table, - :roles_table, - :assignments_table - ]) -end diff --git a/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl/ddlx_init.sql.eex b/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl/ddlx_init.sql.eex deleted file mode 100644 index d3ae868134..0000000000 --- a/components/electric/lib/electric/postgres/extension/migrations/20230814170745_electric_ddl/ddlx_init.sql.eex +++ /dev/null @@ -1,663 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - 
--- CUT HERE enable function -CREATE OR REPLACE PROCEDURE <%= schema %>.enable(table_name text) SECURITY DEFINER AS -$$ - BEGIN - CALL <%= schema %>.electrify(table_name); - END; -$$ LANGUAGE plpgsql; - --- CUT HERE disable function -CREATE OR REPLACE PROCEDURE <%= schema %>.disable(table_name text) SECURITY DEFINER AS -$$ - BEGIN - END; -$$ LANGUAGE plpgsql; - --- CUT HERE grant function -CREATE OR REPLACE PROCEDURE <%= schema %>.grant(privilege_name text, - on_table_name text, - role_name text, - columns text[], - scope_name text, - using_path text, - check_fn text) - SECURITY DEFINER AS $$ - - DECLARE - col TEXT; - - BEGIN - FOREACH col IN ARRAY columns - LOOP - INSERT INTO <%= grants_table %> ( privilege, on_table, role , column_name, scope, using_path, check_fn) - VALUES (privilege_name, on_table_name, role_name, col, scope_name, using_path, check_fn) - ON CONFLICT ON CONSTRAINT grants_pkey DO UPDATE SET - (using_path, check_fn) = (EXCLUDED.using_path, EXCLUDED.check_fn); - END LOOP; - END; -$$ LANGUAGE plpgsql; - --- CUT HERE revoke function -CREATE OR REPLACE PROCEDURE <%= schema %>.revoke( - privilege_name text, - on_table_name text, - role_name text, - columns text[], - scope_name text -) SECURITY DEFINER AS $$ - - DECLARE - all_columns BOOLEAN; - - BEGIN - PERFORM '*' = ANY(columns) As all_columns; - - IF all_columns THEN - DELETE FROM <%= grants_table %> WHERE - privilege = privilege_name AND - on_table = on_table_name AND - role = role_name AND - scope = scope_name; - ELSE - DELETE FROM <%= grants_table %> WHERE - privilege = privilege_name AND - on_table = on_table_name AND - role = role_name AND - scope = scope_name AND - column_name = any(columns); - END IF; - END; -$$ LANGUAGE plpgsql; - - --- CUT HERE assign function -CREATE OR REPLACE PROCEDURE <%= schema %>.assign( - assign_schema text, - assign_table text, - scope text, - user_column_name text, - role_name_string text, - role_column_name text, - if_fn text -) SECURITY DEFINER AS $$ - -DECLARE - 
assignment_id uuid; - assign_table_full_name TEXT; - scope_table_not_null TEXT; - role_name_not_null TEXT; - role_column_not_null TEXT; - if_fn_not_null TEXT; - role_def TEXT; - assignment_name TEXT; - user_column_type TEXT; - scope_key_count int; - user_key_count int; - scope_key RECORD; - user_key RECORD; - primary_key RECORD; - -BEGIN - - -- return types for the introspection of foreign keys - CREATE TEMP TABLE scope_fkeys - ( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ); - - CREATE TEMP TABLE user_fkeys - ( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ); - - CREATE TEMP TABLE pkeys - ( - columns name[10], - types information_schema.character_data[10] - ); - - -- gets the columns and types for the assign_table's primary key - INSERT INTO pkeys SELECT * from <%= schema %>.find_pk(assign_schema, assign_table); - SELECT * FROM pkeys LIMIT 1 INTO primary_key; - - - -- gets the foreign key pointing to the user - INSERT INTO user_fkeys SELECT * from <%= schema %>.find_fk_for_column(assign_schema,assign_table, user_column_name); - SELECT COUNT(*) FROM user_fkeys INTO user_key_count; - - IF user_key_count = 0 THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'Could not find a foreign key pointing to the user table'; - END IF; - - SELECT * FROM user_fkeys LIMIT 1 INTO user_key; - - SELECT data_type FROM information_schema.columns - WHERE table_name = user_key.to_table and column_name = user_key.to_columns[1] - INTO user_column_type; - - -- sets some things to default strings if the function args are null - IF scope IS NULL THEN scope_table_not_null = '__none__'; ELSE scope_table_not_null = scope; END IF; - IF if_fn IS NULL THEN if_fn_not_null = 'true'; ELSE if_fn_not_null = 
if_fn; END IF; - - IF role_name_string IS NULL AND role_column_name IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'You must give either a role_name_string or a role_column_name'; - END IF; - - IF NOT role_name_string IS NULL AND NOT role_column_name IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'You must give either a role_name_string or a role_column_name but not both'; - END IF; - - assign_table_full_name = format('%s.%s', assign_schema, assign_table); - - IF role_name_string IS NULL THEN - role_name_not_null = '__none__'; - role_column_not_null = role_column_name; - role_def = format('NEW.%s', role_column_name); - ELSE - role_name_not_null = role_name_string; - role_column_not_null = '__none__'; - role_def = format(E'\'%s\'', role_name_string); - END IF; - - -- reads the foreign key for the scope if it exists - IF NOT scope IS NULL THEN - INSERT INTO scope_fkeys SELECT * from <%= schema %>.find_fk_to_table(assign_schema,assign_table, scope); - SELECT COUNT(*) FROM scope_fkeys INTO scope_key_count; - - IF scope_key_count > 1 THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - -- The assign_table is assumed to have a single foreign key pointing to the scope table - RAISE EXCEPTION 'Too many foreign keys for the scope table'; - END IF; - - IF scope_key_count = 0 THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - -- The assign_table is assumed to have a single foreign key pointing to the scope table - RAISE EXCEPTION 'Could not find a foreign key pointing to the scope table'; - END IF; - - SELECT * FROM scope_fkeys LIMIT 1 INTO scope_key; - - END IF; - - -- Creates the assignment itself. 
- INSERT INTO <%= assignments_table %> (table_name, scope_table, user_column, role_name, role_column, if_fn) - VALUES (assign_table_full_name, scope_table_not_null, user_column_name, role_name_not_null, role_column_not_null, if_fn) - RETURNING id INTO assignment_id; - - if assignment_id IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'Could not create assignment'; - END IF; - - - -- this is a canonical name used by components owned by this assignment - assignment_name = REPLACE(format('%s', assignment_id), '-', '_'); - - /* - Creates big fat join table. Every time the assignment rule is used and a user is given a role a row will be created - in both this join table and in the table electric.roles. This table serves as a polymorphic join between the roles - table and the different types of both scope table and assignment table, and handles clean up correctly via fk cascade on delete. - - This table have 4 or 5 foreign keys - - It has foreign keys with ON DELETE CASCADE pointing to: - - The assignment created above. This assignment is the rule that causes all the entries in this join to be created in owns them. - - The user that the role has been given too. - - The assignment table item that assigned the role. - - The row in the scope table if one is specified. - - So that any of these being deleted will remove the join. - - And it has a foreign key pointing to the role in electric.roles which it will delete with a trigger. 
- */ - - EXECUTE format('CREATE TABLE IF NOT EXISTS <%= schema %>.assignment_%s_join ( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - user_id %s, - assignment_id uuid, - role_id uuid, - FOREIGN KEY(role_id) - REFERENCES <%= roles_table %> (id), - FOREIGN KEY(user_id) - REFERENCES %s.%s(%s) - ON DELETE CASCADE, - FOREIGN KEY(assignment_id) - REFERENCES <%= assignments_table %> (id) - ON DELETE CASCADE - );', - assignment_name, - user_key.to_types[1], - user_key.to_schema, - user_key.to_table, - user_key.to_columns[1] - ); - - -- Adds a foreign key to the join table pointing to the assign_table - for counter in 1..ARRAY_LENGTH(primary_key.columns, 1) - loop - EXECUTE format('ALTER TABLE <%= schema %>.assignment_%s_join ADD COLUMN IF NOT EXISTS %s_%s %s;', - assignment_name, - assign_table, - primary_key.columns[counter], - primary_key.types[counter] - ); - end loop; - - EXECUTE format('ALTER TABLE <%= schema %>.assignment_%s_join - ADD CONSTRAINT electric_%s_join_%s_fk - FOREIGN KEY (%s_%s) - REFERENCES %s.%s(%s) - ON DELETE CASCADE;', - assignment_name, - assignment_name, - assign_table, - assign_table, - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - assign_schema, - assign_table, - ARRAY_TO_STRING(primary_key.columns, ', ') - ); - - -- defines insert and update trigger functions for the assign_table - -- when there is no scope - IF scope IS NULL THEN - - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema %>.upsert_role_%1$s() RETURNS TRIGGER SECURITY DEFINER - AS $%2$s$ - DECLARE - role_key uuid; - join_key uuid; - BEGIN - - SELECT id, role_id FROM <%= schema %>.assignment_%1$s_join WHERE assignment_id = \'%4$s\' AND ( %5$s_%6$s ) = ( NEW.%7$s ) INTO join_key, role_key; - IF ( %8$s ) THEN - IF join_key IS NULL THEN - INSERT INTO <%= roles_table %> (user_id, role) - VALUES (NEW.%9$s, %10$s) returning id INTO role_key; - INSERT INTO <%= schema %>.assignment_%1$s_join (user_id, %5$s_%6$s, role_id, assignment_id) - VALUES (NEW.%9$s, 
NEW.%7$s, role_key, \'%4$s\'); - ELSE - UPDATE <%= schema %>.assignment_%1$s_join SET user_id = NEW.%9$s - WHERE id = join_key; - UPDATE <%= roles_table %> SET (user_id, role) = (NEW.%9$s, %10s) - WHERE id = role_key; - END IF; - ELSE - IF NOT join_key IS NULL THEN - DELETE FROM <%= schema %>.assignment_%1$s_join WHERE id = join_key; - END IF; - END IF; - RETURN NEW; - END; - $%2$s$ LANGUAGE plpgsql;', - --1 - assignment_name, - --2 - '', - --3 - '', - --4 - assignment_id, - --5 - assign_table, - --6 - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - --7 - ARRAY_TO_STRING(primary_key.columns, ', NEW.'), - --8 - if_fn_not_null, - --9 - user_key.from_columns[1], - --10 - role_def, - --11 - scope - ); - - -- and when there is a scope - ELSE - for counter in 1..ARRAY_LENGTH(scope_key.from_columns, 1) - loop - EXECUTE format('ALTER TABLE <%= schema %>.assignment_%s_join ADD COLUMN IF NOT EXISTS %s %s;', - assignment_name, - scope_key.from_columns[counter], - scope_key.to_types[counter] - ); - end loop; - - EXECUTE format('ALTER TABLE <%= schema %>.assignment_%s_join - ADD CONSTRAINT electric_%s_join_scope_fk - FOREIGN KEY (%s) - REFERENCES %s.%s(%s) - ON DELETE CASCADE;', - assignment_name, - assignment_name, - ARRAY_TO_STRING(scope_key.from_columns, ', '), - scope_key.to_schema, - scope_key.to_table, - ARRAY_TO_STRING(scope_key.to_columns, ', ') - ); - - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema %>.upsert_role_%1$s() RETURNS TRIGGER SECURITY DEFINER - AS $%2$s$ - DECLARE - scope_key TEXT; - scope_list TEXT[]; - role_key uuid; - join_key uuid; - BEGIN - - scope_list := ARRAY[NEW.%3$s::text]; - scope_key := ARRAY_TO_STRING(scope_list, \', \' ); - - SELECT id, role_id FROM <%= schema %>.assignment_%1$s_join WHERE assignment_id = \'%4$s\' AND ( %5$s_%6$s ) = ( NEW.%7$s ) INTO join_key, role_key; - IF ( %8$s ) THEN - IF join_key IS NULL THEN - INSERT INTO <%= roles_table %> (user_id, role, scope_table, scope_id) - VALUES (NEW.%9$s, %10$s, 
\'%11$s\', scope_key) returning id INTO role_key; - INSERT INTO <%= schema %>.assignment_%1$s_join (user_id, %12$s, %5$s_%6$s, role_id, assignment_id) - VALUES (NEW.%9$s, NEW.%13$s, NEW.%7$s, role_key, \'%4$s\'); - ELSE - UPDATE <%= schema %>.assignment_%1$s_join SET (user_id, %12$s) - = (NEW.%9$s, NEW.%13$s) WHERE id = join_key; - UPDATE <%= roles_table %> SET (user_id, role, scope_table, scope_id) - = (NEW.%9$s, %10$s, \'%11$s\', scope_key) WHERE id = role_key; - END IF; - ELSE - IF NOT join_key IS NULL THEN - DELETE FROM <%= schema %>.assignment_%1$s_join WHERE id = join_key; - END IF; - END IF; - RETURN NEW; - END; - $%2$s$ LANGUAGE plpgsql;', - --1 - assignment_name, - --2 - '', - --3 - ARRAY_TO_STRING(scope_key.from_columns, '::text, NEW.'), - --4 - assignment_id, - --5 - assign_table, - --6 - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - --7 - ARRAY_TO_STRING(primary_key.columns, ', NEW.'), - --8 - if_fn_not_null, - --9 - user_key.from_columns[1], - --10 - role_def, - --11 - scope, - --12 - ARRAY_TO_STRING(scope_key.from_columns, ', '), - --13 - ARRAY_TO_STRING(scope_key.from_columns, ', NEW.') - ); - END IF; - - -- adds a trigger to the join table that deletes the role itself - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema %>.cleanup_role_%s() RETURNS TRIGGER SECURITY DEFINER - AS $%s$ - BEGIN - DELETE FROM <%= roles_table %> WHERE id = OLD.role_id; - RETURN OLD; - END; - $%s$ LANGUAGE plpgsql;', - assignment_name, - '', - '' - ); - - EXECUTE format('CREATE OR REPLACE TRIGGER electric_cleanup_role_%s - AFTER DELETE ON <%= schema %>.assignment_%s_join - FOR EACH ROW - EXECUTE FUNCTION <%= schema %>.cleanup_role_%s();', - assignment_name, - assignment_name, - assignment_name - ); - - -- adds the insert and update triggers functions to the assign_table - EXECUTE format('CREATE OR REPLACE TRIGGER electric_insert_role_%s - AFTER INSERT ON %s - FOR EACH ROW - EXECUTE FUNCTION <%= schema %>.upsert_role_%s();', - assignment_name, - 
assign_table, - assignment_name - ); - - EXECUTE format('CREATE OR REPLACE TRIGGER electric_update_role_%s - AFTER UPDATE ON %s - FOR EACH ROW - EXECUTE FUNCTION <%= schema %>.upsert_role_%s();', - assignment_name, - assign_table, - assignment_name - ); - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; -END; -$$ LANGUAGE plpgsql; - - --- CUT HERE unassign function -CREATE OR REPLACE PROCEDURE <%= schema %>.unassign( - assign_schema text, - assign_table text, - scope text, - user_column_name text, - role_name_string text, - role_column_name text -) SECURITY DEFINER AS -$$ - -DECLARE - assignment_id uuid; - assignment_name TEXT; - scope_table_not_null TEXT; - role_name_not_null TEXT; - role_column_not_null TEXT; - assign_table_full_name TEXT; - -BEGIN - - IF role_name_string IS NULL THEN role_name_not_null = '__none__'; ELSE role_name_not_null = role_name_string; END IF; - IF role_column_name IS NULL THEN role_column_not_null = '__none__'; ELSE role_column_not_null = role_column_name; END IF; - IF scope IS NULL THEN scope_table_not_null = '__none__'; ELSE scope_table_not_null = scope; END IF; - - assign_table_full_name = format('%s.%s', assign_schema, assign_table); - - SELECT id FROM <%= assignments_table %> - WHERE table_name = assign_table_full_name - AND scope_table = scope_table_not_null - AND user_column = user_column_name - AND role_name = role_name_not_null - AND role_column = role_column_not_null - INTO assignment_id; - - assignment_name = REPLACE(format('%s', assignment_id), '-', '_'); - - -- remove triggers - EXECUTE format('DROP TRIGGER IF EXISTS electric_cleanup_role_%s ON <%= schema %>.assignment_%s_join;', - assignment_name, - assignment_name - ); - - EXECUTE format('DROP TRIGGER IF EXISTS electric_insert_role_%s ON %s;', - assignment_name, - assign_table - ); - - EXECUTE format('DROP TRIGGER IF EXISTS electric_update_role_%s ON %s;', - assignment_name, - assign_table - ); - - -- remove functions - EXECUTE format('DROP FUNCTION IF 
EXISTS <%= schema %>.cleanup_role_%s;', - assignment_name - ); - - EXECUTE format('DROP FUNCTION IF EXISTS <%= schema %>.upsert_role_%s;', - assignment_name - ); - - -- remove join table - EXECUTE format('DROP TABLE IF EXISTS <%= schema %>.assignment_%s_join;', - assignment_name - ); - - -- remove assignment - DELETE FROM <%= assignments_table %> WHERE id = assignment_id; -END; -$$ LANGUAGE plpgsql; - --- CUT HERE sqlite function -CREATE OR REPLACE PROCEDURE <%= schema %>.sqlite(sql text) - SECURITY DEFINER AS $$ - BEGIN - NULL; - END; -$$ LANGUAGE plpgsql; - - --- CUT HERE find foreign keys -CREATE OR REPLACE FUNCTION <%= schema %>.find_fk_to_table( - src_schema text, - src_table text, - dst_table text) RETURNS TABLE( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ) SECURITY DEFINER AS $$ - - BEGIN - RETURN QUERY - SELECT sch.nspname AS "from_schema", - tbl.relname AS "from_table", - ARRAY_AGG(col.attname ORDER BY u.attposition) AS "from_columns", - f_sch.nspname AS "to_schema", - f_tbl.relname AS "to_table", - ARRAY_AGG(f_col.attname ORDER BY f_u.attposition) AS "to_columns", - ARRAY_AGG((SELECT data_type FROM information_schema.columns WHERE table_name = src_table and column_name = col.attname) ORDER BY f_u.attposition) AS "to_types" - FROM pg_constraint c - LEFT JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE - LEFT JOIN LATERAL UNNEST(c.confkey) WITH ORDINALITY AS f_u(attnum, attposition) ON f_u.attposition = u.attposition - JOIN pg_class tbl ON tbl.oid = c.conrelid - JOIN pg_namespace sch ON sch.oid = tbl.relnamespace - LEFT JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum) - LEFT JOIN pg_class f_tbl ON f_tbl.oid = c.confrelid - LEFT JOIN pg_namespace f_sch ON f_sch.oid = f_tbl.relnamespace - LEFT JOIN pg_attribute f_col ON (f_col.attrelid = f_tbl.oid AND f_col.attnum = 
f_u.attnum) - WHERE c.contype = 'f' and tbl.relname = src_table and f_tbl.relname = dst_table and sch.nspname = src_schema - GROUP BY "from_schema", "from_table", "to_schema", "to_table" - ORDER BY "from_schema", "from_table"; - END; - -$$ LANGUAGE plpgsql; - --- CUT HERE find foreign keys for column -CREATE OR REPLACE FUNCTION <%= schema %>.find_fk_for_column( - src_schema text, - src_table text, - src_column text) RETURNS TABLE( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ) SECURITY DEFINER AS $$ - - BEGIN - RETURN QUERY - SELECT sch.nspname AS "from_schema", - tbl.relname AS "from_table", - ARRAY_AGG(col.attname ORDER BY u.attposition) AS "from_columns", - f_sch.nspname AS "to_schema", - f_tbl.relname AS "to_table", - ARRAY_AGG(f_col.attname ORDER BY f_u.attposition) AS "to_columns", - ARRAY_AGG((SELECT data_type FROM information_schema.columns WHERE table_name = src_table and column_name = col.attname) ORDER BY f_u.attposition) AS "to_types" - FROM pg_constraint c - LEFT JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE - LEFT JOIN LATERAL UNNEST(c.confkey) WITH ORDINALITY AS f_u(attnum, attposition) ON f_u.attposition = u.attposition - JOIN pg_class tbl ON tbl.oid = c.conrelid - JOIN pg_namespace sch ON sch.oid = tbl.relnamespace - LEFT JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum) - LEFT JOIN pg_class f_tbl ON f_tbl.oid = c.confrelid - LEFT JOIN pg_namespace f_sch ON f_sch.oid = f_tbl.relnamespace - LEFT JOIN pg_attribute f_col ON (f_col.attrelid = f_tbl.oid AND f_col.attnum = f_u.attnum) - WHERE c.contype = 'f' and tbl.relname = src_table and col.attname = src_column and sch.nspname = src_schema - GROUP BY "from_schema", "from_table", "to_schema", "to_table" - ORDER BY "from_schema", "from_table"; - END; - -$$ LANGUAGE plpgsql; - --- CUT HERE find primary key -CREATE OR 
REPLACE FUNCTION <%= schema %>.find_pk( - src_schema text, - src_table text) RETURNS TABLE( - columns name[10], - types information_schema.character_data[10] - ) SECURITY DEFINER AS $$ - BEGIN - RETURN QUERY - SELECT ARRAY_AGG(col.attname ORDER BY u.attposition) AS "columns", - ARRAY_AGG((SELECT data_type FROM information_schema.columns WHERE table_name = src_table and column_name = col.attname) ORDER BY f_u.attposition) AS "types" - FROM pg_constraint c - LEFT JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE - LEFT JOIN LATERAL UNNEST(c.confkey) WITH ORDINALITY AS f_u(attnum, attposition) ON f_u.attposition = u.attposition - JOIN pg_class tbl ON tbl.oid = c.conrelid - JOIN pg_namespace sch ON sch.oid = tbl.relnamespace - LEFT JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum) - LEFT JOIN pg_class f_tbl ON f_tbl.oid = c.confrelid - LEFT JOIN pg_namespace f_sch ON f_sch.oid = f_tbl.relnamespace - LEFT JOIN pg_attribute f_col ON (f_col.attrelid = f_tbl.oid AND f_col.attnum = f_u.attnum) - WHERE c.contype = 'p' and tbl.relname = src_table and sch.nspname = src_schema; - END; - -$$ LANGUAGE plpgsql; - diff --git a/components/electric/lib/electric/postgres/extension/migrations/20240212161153_ddlx_commands.ex b/components/electric/lib/electric/postgres/extension/migrations/20240212161153_ddlx_commands.ex index fd5bc3c833..8c8a7f0f4f 100644 --- a/components/electric/lib/electric/postgres/extension/migrations/20240212161153_ddlx_commands.ex +++ b/components/electric/lib/electric/postgres/extension/migrations/20240212161153_ddlx_commands.ex @@ -9,7 +9,6 @@ defmodule Electric.Postgres.Extension.Migrations.Migration_20240212161153_DDLXCo @impl true def up(schema) do ddlx_table = Extension.ddlx_table() - assignments_table = Extension.assignments_table() txid_type = Extension.txid_type() txts_type = Extension.txts_type() @@ -22,17 +21,6 @@ defmodule Electric.Postgres.Extension.Migrations.Migration_20240212161153_DDLXCo 
ddlx bytea NOT NULL ); """, - """ - DROP PROCEDURE #{schema}.assign(text,text,text,text,text,text,text); - """, - """ - DROP PROCEDURE #{schema}.unassign(text,text,text,text,text,text); - """, - # change assignment id type because we're now generating this externally - """ - ALTER TABLE #{assignments_table} ALTER COLUMN id TYPE text, - ALTER COLUMN id DROP DEFAULT; - """, Extension.add_table_to_publication_sql(ddlx_table) ] end diff --git a/components/electric/lib/electric/postgres/extension/migrations/20240214131615_permissions_state.ex b/components/electric/lib/electric/postgres/extension/migrations/20240214131615_permissions_state.ex new file mode 100644 index 0000000000..b244354ad2 --- /dev/null +++ b/components/electric/lib/electric/postgres/extension/migrations/20240214131615_permissions_state.ex @@ -0,0 +1,89 @@ +defmodule Electric.Postgres.Extension.Migrations.Migration_20240214131615_PermissionsState do + alias Electric.Postgres.Extension + alias Electric.Satellite.SatPerms + + @behaviour Extension.Migration + + @impl true + def version, do: 2024_02_14_13_16_15 + + @impl true + def up(schema) do + global_perms_table = Extension.global_perms_table() + user_perms_table = Extension.user_perms_table() + + empty_rules = + %SatPerms.Rules{id: 1} |> Protox.encode!() |> IO.iodata_to_binary() |> Base.encode16() + + [ + """ + CREATE TABLE #{global_perms_table} ( + id int8 NOT NULL PRIMARY KEY, + parent_id int8 UNIQUE REFERENCES #{global_perms_table} (id) ON DELETE SET NULL, + rules bytea NOT NULL, + inserted_at timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP + ); + """, + """ + CREATE UNIQUE INDEX ON #{global_perms_table} ((1)) WHERE parent_id IS NULL; + """, + """ + CREATE TABLE #{user_perms_table} ( + id serial8 NOT NULL PRIMARY KEY, + parent_id int8 REFERENCES #{user_perms_table} (id) ON DELETE SET NULL, + global_perms_id int8 NOT NULL REFERENCES #{global_perms_table} (id) ON DELETE CASCADE, + user_id text NOT NULL, + roles bytea NOT NULL, + 
inserted_at timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP + ); + """, + """ + CREATE UNIQUE INDEX ON #{user_perms_table} (user_id) WHERE parent_id IS NULL; + """, + """ + CREATE INDEX user_perms_user_id_idx ON #{user_perms_table} (user_id, id); + """, + """ + INSERT INTO #{global_perms_table} (id, rules) VALUES (1, '\\x#{empty_rules}'::bytea) + """, + """ + DROP TABLE IF EXISTS #{schema}.roles CASCADE + """, + """ + DROP TABLE IF EXISTS #{schema}.grants CASCADE + """, + """ + DROP TABLE IF EXISTS #{schema}.assignments CASCADE + """, + """ + DROP PROCEDURE IF EXISTS #{schema}.assign; + """, + """ + DROP PROCEDURE IF EXISTS #{schema}.unassign; + """, + """ + DROP PROCEDURE IF EXISTS #{schema}.grant; + """, + """ + DROP PROCEDURE IF EXISTS #{schema}.revoke; + """, + """ + DROP PROCEDURE IF EXISTS #{schema}.sqlite; + """, + """ + DROP FUNCTION IF EXISTS #{schema}.find_fk_to_table; + """, + """ + DROP FUNCTION IF EXISTS #{schema}.find_fk_for_column; + """, + """ + DROP FUNCTION IF EXISTS #{schema}.find_pk; + """ + ] + end + + @impl true + def down(_schema) do + [] + end +end diff --git a/components/electric/lib/electric/postgres/extension/permissions.ex b/components/electric/lib/electric/postgres/extension/permissions.ex new file mode 100644 index 0000000000..4e865983a6 --- /dev/null +++ b/components/electric/lib/electric/postgres/extension/permissions.ex @@ -0,0 +1,149 @@ +defmodule Electric.Postgres.Extension.Permissions do + alias Electric.Postgres.Extension + alias Electric.Satellite.SatPerms + + @global_perms_table Extension.global_perms_table() + @user_perms_table Extension.user_perms_table() + + @shared_global_query """ + SELECT "id", "parent_id", "rules" FROM #{@global_perms_table} + """ + + @current_global_query """ + #{@shared_global_query} + ORDER BY "id" DESC + LIMIT 1 + """ + + @specific_global_query """ + #{@shared_global_query} + WHERE id = $1 + LIMIT 1 + """ + + @shared_user_query """ + SELECT u.id, + u.parent_id, + u.roles, + g.rules + 
FROM #{@user_perms_table} u + INNER JOIN #{@global_perms_table} g ON g.id = u.global_perms_id + """ + + @current_user_query """ + #{@shared_user_query} + WHERE (u.user_id = $1) + ORDER BY u.id DESC + LIMIT 1 + """ + + @specific_user_query """ + #{@shared_user_query} + WHERE (u.user_id = $1) AND (u.id = $2) + LIMIT 1 + """ + + # We need to duplicate all the current user perms that, which all depend on the previous version + # of the global rules. This query is complicated by the need to only select the most current + # version of each user's permissions (because for a given rules id, a user may have multiple + # versions of their roles). + @save_global_query """ + WITH global_perms AS ( + INSERT INTO #{@global_perms_table} (id, parent_id, rules) + VALUES ($1, $2, $3) RETURNING id, parent_id + ) + INSERT INTO #{@user_perms_table} (user_id, parent_id, roles, global_perms_id) + SELECT u.*, global_perms.id FROM + (SELECT DISTINCT user_id FROM #{@user_perms_table} ORDER BY user_id) uid + JOIN LATERAL ( + SELECT ui.user_id, ui.id, ui.roles FROM #{@user_perms_table} ui + WHERE ui.user_id = uid.user_id + ORDER BY ui.id DESC + LIMIT 1 + ) u ON TRUE, global_perms + """ + + @create_user_query """ + WITH global_perms AS ( + SELECT id, rules + FROM #{@global_perms_table} + ORDER BY id DESC + LIMIT 1 + ), user_perms AS ( + INSERT INTO #{@user_perms_table} (user_id, parent_id, roles, global_perms_id) + SELECT $1, $2, $3, g.id + FROM global_perms g + RETURNING id + ) + SELECT user_perms.id AS user_id, + global_perms.id AS global_id, + global_perms.rules + FROM user_perms, global_perms + """ + + def global(conn) do + with {:ok, _cols, [row]} <- :epgsql.equery(conn, @current_global_query, []), + {_id, _parent_id, bytes} = row do + Protox.decode(bytes, SatPerms.Rules) + end + end + + def global(conn, id) do + with {:ok, _cols, [row]} <- :epgsql.equery(conn, @specific_global_query, [id]), + {_id, _parent_id, bytes} = row do + Protox.decode(bytes, SatPerms.Rules) + end + end + + def 
save_global(conn, %SatPerms.Rules{id: id, parent_id: parent_id} = rules) do + with {:ok, iodata} <- Protox.encode(rules), + bytes = IO.iodata_to_binary(iodata), + {:ok, _users} <- :epgsql.equery(conn, @save_global_query, [id, parent_id, bytes]) do + :ok + end + end + + def user(conn, user_id) do + load_user_perms(conn, user_id, @current_user_query, [user_id], fn conn -> + insert_user(conn, user_id) + end) + end + + def user(conn, user_id, perms_id) do + load_user_perms(conn, user_id, @specific_user_query, [user_id, perms_id], fn _conn -> + {:error, "no user permissions found for user=#{user_id} id=#{perms_id}"} + end) + end + + def save_user(conn, user_id, %SatPerms.Roles{} = roles) do + insert_user(conn, user_id, roles) + end + + defp load_user_perms(conn, user_id, query, binds, not_found_fun) do + case :epgsql.equery(conn, query, binds) do + {:ok, _, [{id, _parent_id, roles_bytes, rules_bytes}]} -> + with {:ok, roles} <- Protox.decode(roles_bytes, SatPerms.Roles), + {:ok, rules} <- Protox.decode(rules_bytes, SatPerms.Rules) do + {:ok, %SatPerms{id: id, user_id: user_id, rules: rules, roles: roles.roles}} + end + + {:ok, _, []} -> + not_found_fun.(conn) + + error -> + error + end + end + + defp insert_user(conn, user_id, roles \\ %SatPerms.Roles{}) do + encoded_roles = + roles |> Protox.encode!() |> IO.iodata_to_binary() + + with {:ok, _, [row]} <- + :epgsql.equery(conn, @create_user_query, [user_id, roles.parent_id, encoded_roles]), + {id, _global_perms_id, rules} = row, + {:ok, rules} = Protox.decode(rules, SatPerms.Rules) do + {:ok, %SatPerms{id: id, user_id: user_id, rules: rules, roles: roles.roles}} + end + end +end diff --git a/components/electric/lib/electric/postgres/extension/permissions_loader.ex b/components/electric/lib/electric/postgres/extension/permissions_loader.ex new file mode 100644 index 0000000000..287da0305b --- /dev/null +++ b/components/electric/lib/electric/postgres/extension/permissions_loader.ex @@ -0,0 +1,6 @@ +defmodule 
Electric.Postgres.Extension.PermissionsLoader do + @type state() :: term() + + # @callback load(state()) + # @callback load(state()) +end diff --git a/components/electric/lib/electric/postgres/extension/schema_cache.ex b/components/electric/lib/electric/postgres/extension/schema_cache.ex index a438b54218..b1bd2bedc2 100644 --- a/components/electric/lib/electric/postgres/extension/schema_cache.ex +++ b/components/electric/lib/electric/postgres/extension/schema_cache.ex @@ -71,7 +71,7 @@ defmodule Electric.Postgres.Extension.SchemaCache do end @impl SchemaLoader - def connect(conn_config, _opts) do + def connect(_opts, conn_config) do {:ok, Connectors.origin(conn_config)} end @@ -150,6 +150,36 @@ defmodule Electric.Postgres.Extension.SchemaCache do call(origin, {:tx_version, row}) end + @impl SchemaLoader + def global_permissions(origin) do + call(origin, :global_permissions) + end + + @impl SchemaLoader + def global_permissions(origin, id) do + call(origin, {:global_permissions, id}) + end + + @impl SchemaLoader + def save_global_permissions(origin, rules) do + call(origin, {:save_global_permissions, rules}) + end + + @impl SchemaLoader + def user_permissions(origin, user_id) do + call(origin, {:user_permissions, user_id}) + end + + @impl SchemaLoader + def user_permissions(origin, user_id, permissions_id) do + call(origin, {:user_permissions, user_id, permissions_id}) + end + + @impl SchemaLoader + def save_user_permissions(origin, user_id, roles) do + call(origin, {:save_user_permissions, user_id, roles}) + end + def relation(origin, oid) when is_integer(oid) do call(origin, {:relation, oid}) end @@ -372,6 +402,54 @@ defmodule Electric.Postgres.Extension.SchemaCache do {:reply, result, state} end + def handle_call(:global_permissions, _from, state) do + {:reply, SchemaLoader.global_permissions(state.backend), state} + end + + def handle_call({:global_permissions, id}, _from, state) do + {:reply, SchemaLoader.global_permissions(state.backend, id), state} + end + + 
def handle_call({:save_global_permissions, rules}, _from, state) do + case SchemaLoader.save_global_permissions(state.backend, rules) do + {:ok, backend} -> + {:reply, {:ok, state.origin}, %{state | backend: backend}} + + error -> + {:reply, error, state} + end + end + + def handle_call({:user_permissions, user_id}, _from, state) do + case SchemaLoader.user_permissions(state.backend, user_id) do + {:ok, backend, roles} -> + {:reply, {:ok, state.origin, roles}, %{state | backend: backend}} + + error -> + {:reply, error, state} + end + end + + def handle_call({:user_permissions, user_id, permissions_id}, _from, state) do + case SchemaLoader.user_permissions(state.backend, user_id, permissions_id) do + {:ok, roles} -> + {:reply, {:ok, roles}, state} + + error -> + {:reply, error, state} + end + end + + def handle_call({:save_user_permissions, user_id, roles}, _from, state) do + case SchemaLoader.save_user_permissions(state.backend, user_id, roles) do + {:ok, backend} -> + {:reply, {:ok, state.origin}, %{state | backend: backend}} + + error -> + {:reply, error, state} + end + end + # Prevent deadlocks: # the list of electrified tables is cached and this refresh_subscription call # is done via an async Task because otherwise we get into a deadlock in the diff --git a/components/electric/lib/electric/postgres/extension/schema_loader.ex b/components/electric/lib/electric/postgres/extension/schema_loader.ex index d07da1cea6..49ec687ac2 100644 --- a/components/electric/lib/electric/postgres/extension/schema_loader.ex +++ b/components/electric/lib/electric/postgres/extension/schema_loader.ex @@ -1,6 +1,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader do alias Electric.Postgres.{Schema, Extension.Migration} alias Electric.Replication.Connectors + alias Electric.Satellite.SatPerms alias __MODULE__.Version @type state() :: term() @@ -18,8 +19,8 @@ defmodule Electric.Postgres.Extension.SchemaLoader do @type t() :: {module(), state()} @type tx_fk_row() :: %{binary() => 
integer() | binary()} - @callback connect(Connectors.config(), Keyword.t()) :: {:ok, state()} - @callback load(state()) :: {:ok, Version.t()} + @callback connect(term(), Connectors.config()) :: {:ok, state()} + @callback load(state()) :: {:ok, Version.t()} | {:error, binary()} @callback load(state(), version()) :: {:ok, Version.t()} | {:error, binary()} @callback save(state(), version(), Schema.t(), [String.t()]) :: {:ok, state(), Version.t()} | {:error, term()} @@ -33,8 +34,29 @@ defmodule Electric.Postgres.Extension.SchemaLoader do @callback index_electrified?(state(), relation()) :: {:ok, boolean()} | {:error, term()} @callback tx_version(state(), tx_fk_row()) :: {:ok, version()} | {:error, term()} + # ok, so these permissions related callbacks are definitely the last nail in the coffin of the + # `SchemaLoader` idea. basically we need the same kind of access to some usually pg-backed + # permissions state data as we do to the schema state. seems pointless to duplicate the pg + # connection stuff, plus why have two connection pools when we already have one. 
+ @callback global_permissions(state()) :: {:ok, %SatPerms.Rules{}} | {:error, term()} + @callback global_permissions(state(), id :: integer()) :: + {:ok, %SatPerms.Rules{}} | {:error, term()} + # loading user permissions for a new user requires inserting an empty state + @callback user_permissions(state(), user_id :: binary()) :: + {:ok, state(), %SatPerms{}} | {:error, term()} + + @callback user_permissions(state(), user_id :: binary(), id :: integer()) :: + {:ok, %SatPerms{}} | {:error, term()} + + @callback save_global_permissions(state(), %SatPerms.Rules{}) :: + {:ok, state()} | {:error, term()} + @callback save_user_permissions(state(), user_id :: binary(), %SatPerms.Roles{}) :: + {:ok, state(), %SatPerms{}} | {:error, term()} + @default_backend {__MODULE__.Epgsql, []} + @behaviour __MODULE__ + def get(opts, key, default \\ @default_backend) do case Keyword.get(opts, key, default) do module when is_atom(module) -> @@ -45,42 +67,51 @@ defmodule Electric.Postgres.Extension.SchemaLoader do end end + @impl true def connect({module, opts}, conn_config) do - with {:ok, state} <- module.connect(conn_config, opts) do + with {:ok, state} <- module.connect(opts, conn_config) do {:ok, {module, state}} end end + @impl true def load({module, state}) do module.load(state) end + @impl true def load({module, state}, version) do module.load(state, version) end + @impl true def save({module, state}, version, schema, stmts) do with {:ok, state, schema_version} <- module.save(state, version, schema, stmts) do {:ok, {module, state}, schema_version} end end + @impl true def relation_oid({module, state}, rel_type, schema, table) do module.relation_oid(state, rel_type, schema, table) end + @impl true def refresh_subscription({module, state}, name) do module.refresh_subscription(state, name) end + @impl true def migration_history({module, state}, version) do module.migration_history(state, version) end + @impl true def known_migration_version?({module, state}, version) do 
module.known_migration_version?(state, version) end + @impl true def internal_schema({module, state}) do module.internal_schema(state) end @@ -91,15 +122,56 @@ defmodule Electric.Postgres.Extension.SchemaLoader do end end + @impl true def table_electrified?({module, state}, relation) do module.table_electrified?(state, relation) end + @impl true def index_electrified?({module, state}, relation) do module.index_electrified?(state, relation) end + @impl true def tx_version({module, state}, row) do module.tx_version(state, row) end + + @impl true + def global_permissions({module, state}) do + module.global_permissions(state) + end + + @impl true + def global_permissions({module, state}, id) do + module.global_permissions(state, id) + end + + @impl true + def save_global_permissions({module, state}, rules) do + with {:ok, state} <- module.save_global_permissions(state, rules) do + {:ok, {module, state}} + end + end + + @impl true + def user_permissions({module, state}, user_id) do + with {:ok, state, perms} <- module.user_permissions(state, user_id) do + {:ok, {module, state}, perms} + end + end + + @impl true + def user_permissions({module, state}, user_id, perms_id) do + with {:ok, perms} <- module.user_permissions(state, user_id, perms_id) do + {:ok, perms} + end + end + + @impl true + def save_user_permissions({module, state}, user_id, roles) do + with {:ok, state, perms} <- module.save_user_permissions(state, user_id, roles) do + {:ok, {module, state}, perms} + end + end end diff --git a/components/electric/lib/electric/postgres/extension/schema_loader/epgsql.ex b/components/electric/lib/electric/postgres/extension/schema_loader/epgsql.ex index 14bf123052..8bfab458d9 100644 --- a/components/electric/lib/electric/postgres/extension/schema_loader/epgsql.ex +++ b/components/electric/lib/electric/postgres/extension/schema_loader/epgsql.ex @@ -65,8 +65,8 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do @pool_timeout 5_000 - @impl true - def 
connect(conn_config, _opts) do + @impl SchemaLoader + def connect(_opts, conn_config) do {:ok, _pool} = NimblePool.start_link( worker: {ConnectionPool, conn_config}, @@ -88,7 +88,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do ) end - @impl true + @impl SchemaLoader def load(pool) do checkout!(pool, fn conn -> with {:ok, version, schema} <- Extension.current_schema(conn) do @@ -97,7 +97,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end - @impl true + @impl SchemaLoader def load(pool, version) do checkout!(pool, fn conn -> with {:ok, version, schema} <- Extension.schema_version(conn, version) do @@ -106,7 +106,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end - @impl true + @impl SchemaLoader def save(pool, version, schema, stmts) do checkout!(pool, fn conn -> with :ok <- Extension.save_schema(conn, version, schema, stmts) do @@ -115,7 +115,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end - @impl true + @impl SchemaLoader def relation_oid(_conn, :trigger, _schema, _table) do raise RuntimeError, message: "oid lookup for triggers no implemented" end @@ -126,7 +126,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end - @impl true + @impl SchemaLoader def refresh_subscription(pool, name) do checkout!(pool, fn conn -> query = ~s|ALTER SUBSCRIPTION "#{name}" REFRESH PUBLICATION WITH (copy_data = false)| @@ -147,21 +147,21 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end - @impl true + @impl SchemaLoader def migration_history(pool, version) do checkout!(pool, fn conn -> Extension.migration_history(conn, version) end) end - @impl true + @impl SchemaLoader def known_migration_version?(pool, version) do checkout!(pool, fn conn -> Extension.known_migration_version?(conn, version) end) end - @impl true + @impl SchemaLoader def internal_schema(pool) do checkout!(pool, fn conn -> oid_loader = &Client.relation_oid(conn, &1, &2, &3) @@ -172,24 
+172,74 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Epgsql do end) end - @impl true + @impl SchemaLoader def table_electrified?(pool, {schema, name}) do checkout!(pool, fn conn -> Extension.electrified?(conn, schema, name) end) end - @impl true + @impl SchemaLoader def index_electrified?(pool, {schema, name}) do checkout!(pool, fn conn -> Extension.index_electrified?(conn, schema, name) end) end - @impl true + @impl SchemaLoader def tx_version(pool, row) do checkout!(pool, fn conn -> Extension.tx_version(conn, row) end) end + + @impl SchemaLoader + def global_permissions(pool) do + checkout!(pool, fn conn -> + Extension.Permissions.global(conn) + end) + end + + @impl SchemaLoader + def global_permissions(pool, id) do + checkout!(pool, fn conn -> + Extension.Permissions.global(conn, id) + end) + end + + @impl SchemaLoader + def save_global_permissions(pool, permissions) do + checkout!(pool, fn conn -> + with :ok <- Extension.Permissions.save_global(conn, permissions) do + {:ok, pool} + end + end) + end + + @impl SchemaLoader + def user_permissions(pool, user_id) do + checkout!(pool, fn conn -> + with {:ok, perms} <- Extension.Permissions.user(conn, user_id) do + {:ok, pool, perms} + end + end) + end + + @impl SchemaLoader + def user_permissions(pool, user_id, perms_id) do + checkout!(pool, fn conn -> + with {:ok, perms} <- Extension.Permissions.user(conn, user_id, perms_id) do + {:ok, perms} + end + end) + end + + @impl SchemaLoader + def save_user_permissions(pool, user_id, roles) do + checkout!(pool, fn conn -> + with {:ok, perms} <- Extension.Permissions.save_user(conn, user_id, roles) do + {:ok, pool, perms} + end + end) + end end diff --git a/components/electric/lib/electric/postgres/extension/schema_loader/version.ex b/components/electric/lib/electric/postgres/extension/schema_loader/version.ex index 02e7baef58..d709c14b66 100644 --- a/components/electric/lib/electric/postgres/extension/schema_loader/version.ex +++ 
b/components/electric/lib/electric/postgres/extension/schema_loader/version.ex @@ -19,7 +19,7 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Version do primary_keys: %{relation() => [String.t()]} } - @spec new(version(), Schema.t()) :: t() + @spec new(version() | nil, Schema.t()) :: t() def new(version, %Schema.Proto.Schema{} = schema) do %__MODULE__{version: version, schema: schema} |> Map.update!(:tables, &cache_tables_by_name(&1, schema)) @@ -100,6 +100,19 @@ defmodule Electric.Postgres.Extension.SchemaLoader.Version do fetch_table_value(pks, relation) end + def foreign_keys(%__MODULE__{} = version, {_, _} = relation, {_, _} = target) do + graph = fk_graph(version) + + case Graph.edges(graph, relation, target) do + [] -> + {:error, + "no foreign key found from #{Electric.Utils.inspect_relation(relation)} to #{Electric.Utils.inspect_relation(target)}"} + + [fk] -> + {:ok, fk.label} + end + end + @spec fk_graph(t()) :: Graph.t() def fk_graph(%__MODULE__{fk_graph: fk_graph}) do fk_graph diff --git a/components/electric/lib/electric/postgres/proxy/handler.ex b/components/electric/lib/electric/postgres/proxy/handler.ex index 835a42d309..41260b12d1 100644 --- a/components/electric/lib/electric/postgres/proxy/handler.ex +++ b/components/electric/lib/electric/postgres/proxy/handler.ex @@ -230,7 +230,7 @@ defmodule Electric.Postgres.Proxy.Handler do %{loader: {loader_module, loader_opts}, connector_config: connector_config} = state - {:ok, loader_conn} = loader_module.connect(connector_config, loader_opts) + {:ok, loader_conn} = loader_module.connect(loader_opts, connector_config) {:ok, {stack, _state} = injector} = state.injector_opts diff --git a/components/electric/lib/electric/replication/changes.ex b/components/electric/lib/electric/replication/changes.ex index b639b37294..8214b95c78 100644 --- a/components/electric/lib/electric/replication/changes.ex +++ b/components/electric/lib/electric/replication/changes.ex @@ -29,6 +29,7 @@ defmodule 
Electric.Replication.Changes do Changes.NewRecord.t() | Changes.UpdatedRecord.t() | Changes.DeletedRecord.t() + | Changes.UpdatedPermissions.t() defmodule Transaction do alias Electric.Replication.Changes @@ -150,6 +151,36 @@ defmodule Electric.Replication.Changes do defstruct [:relation] end + defmodule UpdatedPermissions do + defmodule UserPermissions do + # When a user's permissions are changed, through some role change, only connections for that + # user need to do anything and since we know the entire permissions state for the user, + # including the important id, at this point just send them along + defstruct [:user_id, :permissions] + + @type t() :: %__MODULE__{user_id: binary(), permissions: %Electric.Satellite.SatPerms{}} + end + + defmodule GlobalPermissions do + # When the global permissions change, i.e. some ddlx command is received via the proxy, then + # every connected user will have to update their permissions. The actual permission id for a + # given user is not knowable without asking pg, so it has to mean every active connection + # bashing the db to load the new permissions for the user. So it's pointless including the + # actual global permissions state. 
+ defstruct [:permissions_id] + + @type t() :: %__MODULE__{ + permissions_id: integer() + } + end + + defstruct [:type, :permissions] + + @type t() :: + %__MODULE__{type: :user, permissions: UserPermissions.t()} + | %__MODULE__{type: :global, permissions: GlobalPermissions.t()} + end + @spec filter_changes_belonging_to_user(Transaction.t(), binary()) :: Transaction.t() def filter_changes_belonging_to_user(%Transaction{changes: changes} = tx, user_id) do %{tx | changes: Enum.filter(changes, &Changes.Ownership.change_belongs_to_user?(&1, user_id))} diff --git a/components/electric/lib/electric/replication/postgres/migration_consumer.ex b/components/electric/lib/electric/replication/postgres/migration_consumer.ex index 9ad586ac89..adf9c93955 100644 --- a/components/electric/lib/electric/replication/postgres/migration_consumer.ex +++ b/components/electric/lib/electric/replication/postgres/migration_consumer.ex @@ -15,9 +15,10 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do Schema } - alias Electric.Replication.Changes.{NewRecord, Transaction} + alias Electric.Replication.Changes.NewRecord alias Electric.Replication.Connectors alias Electric.Replication.Postgres.Client + alias Electric.Satellite.Permissions alias Electric.Telemetry.Metrics @@ -59,6 +60,8 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do |> SchemaLoader.get(:backend, SchemaCache) |> SchemaLoader.connect(conn_config) + {:ok, permissions_consumer} = Permissions.Consumer.new(loader) + refresh_sub? 
= Keyword.get(opts, :refresh_subscription, true) Logger.info("Starting #{__MODULE__} using #{elem(loader, 0)} backend") @@ -69,6 +72,7 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do subscription: subscription, producer: producer, loader: loader, + permissions: permissions_consumer, opts: opts, refresh_subscription: refresh_sub?, refresh_enum_types: Keyword.get(opts, :refresh_enum_types, true), @@ -93,17 +97,26 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do @impl GenStage def handle_events(transactions, _from, state) do - {:noreply, filter_transactions(transactions), process_migrations(transactions, state)} + {txns, state} = process_transactions(transactions, state) + {:noreply, txns, state} + end + + defp process_transactions(transactions, state) do + {_transactions, _state} = + Enum.map_reduce(transactions, state, &process_transaction/2) end - defp filter_transactions(transactions) do - Enum.map(transactions, &filter_transaction/1) + defp process_transaction(tx, state) do + {changes, state} = + {tx.changes, state} + |> process_migrations() + |> process_permissions() + |> filter_changes() + + {%{tx | changes: changes}, state} end - # FIXME: we need this to prevent extension metadata tables from being - # replicated between pg instances. 
Should be removed once we're only - # replicating a subset of tables, rather than all - defp filter_transaction(%Transaction{changes: changes} = tx) do + defp filter_changes({changes, state}) do filtered = Enum.filter(changes, fn %{relation: relation} when is_ddl_relation(relation) -> @@ -116,39 +129,47 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do Logger.debug("---- Filtering #{inspect(change)}") false - # TODO: VAX-680 remove this special casing of schema_migrations table - # once we are selectivley replicating tables - %{relation: {"public", "schema_migrations"}} -> - false - _change -> true end) - %{tx | changes: filtered} + {filtered, state} end - defp filter_transaction(change) do - change + defp process_permissions({changes, state}) do + %{permissions: consumer_state, loader: loader} = state + + {:ok, changes, consumer_state, loader} = + Permissions.Consumer.update(changes, consumer_state, loader) + + {changes, %{state | permissions: consumer_state, loader: loader}} end - defp process_migrations(transactions, state) do - {state, num_applied_migrations} = - transactions - |> Enum.flat_map(&transaction_changes_to_migrations(&1, state)) + defp process_migrations({changes, state}) do + {state, migration_versions} = + changes + |> transaction_changes_to_migrations(state) |> Enum.group_by(&elem(&1, 0), &elem(&1, 1)) - |> Enum.reduce({state, 0}, fn migration, {state, num_applied} -> - {perform_migration(migration, state), num_applied + 1} + |> Enum.reduce({state, []}, fn migration, {state, versions} -> + {state, schema_version} = perform_migration(migration, state) + {state, [schema_version | versions]} end) - if num_applied_migrations > 0 do - refresh_subscription(state) - else - state + case migration_versions do + [] -> + {changes, state} + + [schema_version | _] -> + state = + state + |> refresh_permissions_consumer(schema_version) + |> refresh_subscription() + + {changes, state} end end - defp 
transaction_changes_to_migrations(%Transaction{changes: changes}, state) do + defp transaction_changes_to_migrations(changes, state) do for %NewRecord{record: record, relation: relation} <- changes, is_ddl_relation(relation) do {:ok, version} = SchemaLoader.tx_version(state.loader, record) {:ok, sql} = Extension.extract_ddl_sql(record) @@ -165,7 +186,7 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do %{migration_version: version} ) - %{state | loader: loader} + {%{state | loader: loader}, schema_version} end # update the subscription to add any new @@ -180,6 +201,11 @@ defmodule Electric.Replication.Postgres.MigrationConsumer do state end + defp refresh_permissions_consumer(state, schema_version) do + consumer_state = Permissions.Consumer.update_schema(state.permissions, schema_version) + %{state | permissions: consumer_state} + end + @impl GenStage def handle_cancel({:down, _}, _from, %{producer: producer} = state) do Logger.warning("producer is down: #{inspect(producer)}") diff --git a/components/electric/lib/electric/satellite/permissions.ex b/components/electric/lib/electric/satellite/permissions.ex index 8091c1012b..1ef9ca7084 100644 --- a/components/electric/lib/electric/satellite/permissions.ex +++ b/components/electric/lib/electric/satellite/permissions.ex @@ -154,8 +154,19 @@ defmodule Electric.Satellite.Permissions do """ use Electric.Satellite.Protobuf + alias Electric.Postgres.Extension.SchemaLoader alias Electric.Replication.Changes - alias Electric.Satellite.Permissions.{Grant, Read, Role, Graph, Transient, WriteBuffer, Trigger} + + alias Electric.Satellite.Permissions.{ + Grant, + Graph, + Read, + Role, + Transient, + Trigger, + WriteBuffer + } + alias Electric.Satellite.{Auth, SatPerms} require Logger @@ -196,7 +207,6 @@ defmodule Electric.Satellite.Permissions do end defstruct [ - :source, :roles, :scoped_roles, :auth, @@ -204,6 +214,7 @@ defmodule Electric.Satellite.Permissions do :write_buffer, :triggers, :intermediate_roles, + 
source: %{rules: %{grants: [], assigns: []}, roles: [], schema: nil}, transient_lut: Transient ] @@ -236,16 +247,19 @@ defmodule Electric.Satellite.Permissions do @type t() :: %__MODULE__{ roles: role_lookup(), source: %{ - grants: [%SatPerms.Grant{}], + rules: %{ + grants: [%SatPerms.Grant{}], + assigns: [%SatPerms.Assign{}] + }, roles: [%SatPerms.Role{}], - assigns: [%SatPerms.Assign{}] + schema: SchemaLoader.Version.t() }, auth: Auth.t(), transient_lut: Transient.lut(), write_buffer: WriteBuffer.t(), scopes: [relation()], scoped_roles: %{relation => [Role.t()]}, - triggers: %{relation() => [Trigger.assign_trigger_fun()]} + triggers: Trigger.triggers() } @doc """ @@ -278,18 +292,55 @@ defmodule Electric.Satellite.Permissions do - `roles` should be a list of `%SatPerms.Role{}` protobuf structs """ - @spec update(empty() | t(), %SatPerms.Rules{}, [%SatPerms.Role{}]) :: t() - def update(%__MODULE__{} = perms, rules, roles) do - %{grants: grants, assigns: assigns} = rules + @spec update(empty() | t(), SchemaLoader.Version.t(), %SatPerms.Rules{}, [%SatPerms.Role{}]) :: + t() + def update(%__MODULE__{} = perms, schema_version, rules, roles) do + update(perms, schema: schema_version, rules: rules, roles: roles) + end + + def update(%__MODULE__{} = perms, attrs) when is_list(attrs) do + perms + |> update_rules(Keyword.get(attrs, :rules)) + |> update_roles(Keyword.get(attrs, :roles)) + |> update_schema(Keyword.get(attrs, :schema)) + |> rebuild() + end - assigned_roles = build_roles(roles, perms.auth) + defp update_schema(perms, nil) do + perms + end + + defp update_schema(perms, %SchemaLoader.Version{} = schema_version) do + %{perms | source: %{perms.source | schema: schema_version}} + end + + defp update_roles(perms, nil) do + perms + end + + defp update_roles(perms, roles) when is_list(roles) do + %{perms | source: %{perms.source | roles: roles}} + end + + defp update_rules(perms, nil) do + perms + end + + defp update_rules(perms, %{grants: _, assigns: _} = rules) do + 
%{perms | source: %{perms.source | rules: Map.take(rules, [:grants, :assigns])}} + end + + defp rebuild(perms) do + %{grants: grants, assigns: assigns} = perms.source.rules + + assigned_roles = build_roles(perms.source.roles, perms.auth, assigns) scoped_roles = compile_scopes(assigned_roles) - triggers = build_triggers(assigns) + + triggers = Trigger.assign_triggers(assigns, perms.source.schema, &trigger_callback/3) %{ perms - | source: %{grants: grants, assigns: assigns, roles: roles}, - roles: build_role_grants(assigned_roles, grants), + | roles: build_role_grants(assigned_roles, grants), scoped_roles: scoped_roles, scopes: Map.keys(scoped_roles), triggers: triggers @@ -315,12 +366,6 @@ defmodule Electric.Satellite.Permissions do |> Map.new(&classify_roles/1) end - defp build_triggers(assigns) do - assigns - |> Stream.flat_map(&Trigger.for_assign/1) - |> Enum.group_by(&elem(&1, 0), &elem(&1, 1)) - end - # For every `{table, privilege}` tuple we have a set of roles that the current user has. # If any of those roles are global, then it's equvilent to saying that the user can perform # `privilege` on `table` no matter what the scope. This function analyses the roles for a @@ -353,8 +398,15 @@ defmodule Electric.Satellite.Permissions do |> Map.new() end - defp build_roles(roles, auth) do + defp build_roles(roles, auth, assigns) do + # after a global (rules) permission change, we copy across all users' permissions without + # modification. if an assign is removed this may leave users with serialised roles with no + # corresponding assign. 
so we should filter a user's roles based on the set of existing + # assigns + assign_ids = MapSet.new(assigns, & &1.id) + roles + |> Stream.filter(&MapSet.member?(assign_ids, &1.assign_id)) |> Enum.map(&Role.new/1) |> add_authenticated(auth) |> add_anyone() @@ -453,15 +505,10 @@ defmodule Electric.Satellite.Permissions do "role #{inspect(role)} grant #{inspect(grant)} gives permission for #{inspect(change)}" ) - write_buffer = Graph.apply_change(write_buffer, perms.scopes, change) - write_buffer = - perms.triggers - |> Map.get(change.relation, []) - |> Enum.flat_map(fn trigger_fun -> - trigger_fun.(change, write_buffer, perms.auth) - end) - |> update_transient_roles(perms, write_buffer) + write_buffer + |> Graph.apply_change(perms.scopes, change) + |> apply_triggers(change, perms) {:cont, {:ok, write_buffer}} end @@ -469,6 +516,46 @@ defmodule Electric.Satellite.Permissions do ) end + defp apply_triggers(write_buffer, change, perms) do + %{auth: %{user_id: user_id}} = perms + + {changes, _user_id} = + Trigger.apply(change, perms.triggers, user_id, &null_trigger/2) + + update_transient_roles(changes, perms, write_buffer) + end + + defp null_trigger(_change, user_id) do + {[], user_id} + end + + defp trigger_callback(event, _change, user_id) do + case event do + {c, %{user_id: ^user_id} = role} when c in [:insert, :delete] -> + {[{c, Role.new(role)}], user_id} + + # update nothing to do with us + {c, _role} when c in [:insert, :delete] -> + {[], user_id} + + # update keeps role belonging to our user + {:update, %{user_id: ^user_id}, %{user_id: ^user_id} = new} -> + {[{:update, Role.new(new)}], user_id} + + # update has moved role to new user + {:update, %{user_id: ^user_id} = old, _new} -> + {[{:delete, Role.new(old)}], user_id} + + # update has moved role us + {:update, _old, %{user_id: ^user_id} = new} -> + {[{:insert, Role.new(new)}], user_id} + + # update nothing to do with us + {:update, _old, _new} -> + {[], user_id} + end + end + @spec verify_write(change(), 
t(), Graph.impl(), lsn()) :: RoleGrant.t() | {:error, String.t()} defp verify_write(change, perms, graph, lsn) do action = required_permission(change) @@ -602,7 +689,7 @@ defmodule Electric.Satellite.Permissions do end def update_transient_roles(role_changes, %__MODULE__{} = perms, write_buffer) do - %{source: %{grants: grants}} = perms + %{source: %{rules: %{grants: grants}}} = perms WriteBuffer.update_transient_roles(write_buffer, role_changes, grants) end diff --git a/components/electric/lib/electric/satellite/permissions/consumer.ex b/components/electric/lib/electric/satellite/permissions/consumer.ex new file mode 100644 index 0000000000..cc549b4ae4 --- /dev/null +++ b/components/electric/lib/electric/satellite/permissions/consumer.ex @@ -0,0 +1,345 @@ +defmodule Electric.Satellite.Permissions.Consumer do + alias Electric.DDLX.Command + alias Electric.Satellite.SatPerms + alias Electric.Postgres.Extension.SchemaLoader + alias Electric.Replication.Changes + alias Electric.Postgres.Extension + alias Electric.Satellite.Permissions.Trigger + + @electric_ddlx Extension.ddlx_relation() + + @enforce_keys [:rules, :schema] + + defstruct [:rules, :schema, triggers: %{}] + + @type name() :: Electric.Postgres.name() + @type trigger_fun() :: + (Changes.change(), SchemaLoader.t() -> {[Changes.change()], SchemaLoader.t()}) + + @type t() :: %__MODULE__{ + rules: %SatPerms.Rules{}, + schema: SchemaLoader.Version.t(), + triggers: %{Electric.Postgres.relation() => trigger_fun()} + } + + @doc """ + Creates a new permissions consumer state, based on the current global rules and the current schema version. 
+ """ + @spec new(SchemaLoader.t()) :: {:ok, t()} | {:error, binary()} + def new(loader) do + with {:ok, schema_version} <- SchemaLoader.load(loader), + {:ok, rules} <- SchemaLoader.global_permissions(loader) do + {:ok, create_triggers(%__MODULE__{rules: rules, schema: schema_version})} + end + end + + defp create_triggers(state) do + triggers = + Trigger.assign_triggers(state.rules.assigns, state.schema, &update_roles_callback/3) + + %{state | triggers: triggers} + end + + @spec update_schema(t(), SchemaLoader.Version.t()) :: t() + def update_schema(state, %SchemaLoader.Version{} = schema_version) do + create_triggers(%{state | schema: schema_version}) + end + + def update(%Changes.Transaction{changes: changes} = tx, state, loader) do + {:ok, changes, state, loader} = update(changes, state, loader) + + {:ok, %{tx | changes: changes}, state, loader} + end + + def update(changes, state, loader) when is_list(changes) do + # group changes by relation -- this is really only to avoid churn on the global permissions + # rules which is an expensive operation. 
by grouping on the relation we can transform a series + # of ddlx permission commands into a single update to the global permissions struct + {changes, {state, loader}} = + changes + |> Stream.chunk_by(& &1.relation) + |> Enum.flat_map_reduce({state, loader}, &apply_changes/2) + + {:ok, changes, state, loader} + end + + # useful function for testing creation of global state + @doc false + def update_global(%SatPerms.DDLX{} = ddlx, loader) do + with {:ok, rules} <- SchemaLoader.global_permissions(loader) do + case mutate_global(ddlx, rules) do + {rules, 0} -> + {:ok, 0, loader, rules} + + {rules, n} -> + with {:ok, loader} <- SchemaLoader.save_global_permissions(loader, rules) do + {:ok, n, loader, rules} + end + end + end + end + + defp apply_changes([%{relation: @electric_ddlx} | _] = changes, {state, loader}) do + {:ok, rules} = SchemaLoader.global_permissions(loader) + + case Enum.reduce(changes, {rules, 0}, &apply_global_change/2) do + {_rules, 0} -> + {[], {state, loader}} + + {rules, _count} -> + {:ok, loader} = SchemaLoader.save_global_permissions(loader, rules) + + { + [updated_global_permissions(rules)], + {create_triggers(%{state | rules: rules}), loader} + } + end + end + + defp apply_changes(changes, {state, loader}) do + {changes, {_triggers, loader}} = + Enum.flat_map_reduce(changes, {state.triggers, loader}, &apply_triggers/2) + + {changes, {state, loader}} + end + + # the ddlx table is insert-only + defp apply_global_change(%Changes.NewRecord{} = change, {rules, count}) do + %{record: %{"ddlx" => ddlx_bytes}} = change + + pb_bytes = + case ddlx_bytes do + "\\x" <> rest -> Base.decode16!(rest, case: :lower) + bytes -> bytes + end + + {:ok, ddlx} = + Protox.decode(pb_bytes, SatPerms.DDLX) + + mutate_global(ddlx, rules, count) + end + + defp apply_triggers(change, {triggers, loader}) do + {changes, loader} = + Trigger.apply(change, triggers, loader) + + {changes, {triggers, loader}} + end + + defp update_roles_callback({:insert, role}, change, loader) 
do + {:ok, loader, perms} = mutate_user_perms(role, loader, &insert_role/2) + + { + [ + change, + updated_user_permissions(role.user_id, perms) + ], + loader + } + end + + defp update_roles_callback({:update, old_role, new_role}, change, loader) do + if old_role.user_id == new_role.user_id do + {:ok, loader, perms} = mutate_user_perms(new_role, loader, &update_role/2) + + { + [ + change, + updated_user_permissions(new_role.user_id, perms) + ], + loader + } + else + {:ok, loader, old_perms} = mutate_user_perms(old_role, loader, &delete_role/2) + {:ok, loader, new_perms} = mutate_user_perms(new_role, loader, &insert_role/2) + + { + [ + change, + updated_user_permissions(old_role.user_id, old_perms), + updated_user_permissions(new_role.user_id, new_perms) + ], + loader + } + end + end + + defp update_roles_callback({:delete, role}, change, loader) do + {:ok, loader, perms} = mutate_user_perms(role, loader, &delete_role/2) + + { + [ + change, + updated_user_permissions(role.user_id, perms) + ], + loader + } + end + + defp mutate_user_perms(role, loader, update_fun) do + with {:ok, loader, perms} <- SchemaLoader.user_permissions(loader, role.user_id), + {:ok, roles} <- update_fun.(perms, role), + roles = gc_roles(perms, roles) do + {:ok, _loader, _perms} = SchemaLoader.save_user_permissions(loader, role.user_id, roles) + end + end + + defp insert_role(perms, new_role) do + with roles <- load_roles(perms) do + {:ok, Map.update!(roles, :roles, &[new_role | &1])} + end + end + + defp update_role(perms, new_role) do + with roles <- load_roles(perms) do + {:ok, + Map.update!( + roles, + :roles, + &Enum.map(&1, fn role -> if role_match?(role, new_role), do: new_role, else: role end) + )} + end + end + + defp delete_role(perms, new_role) do + with roles <- load_roles(perms) do + {:ok, + Map.update!( + roles, + :roles, + &Enum.reject(&1, fn role -> role_match?(role, new_role) end) + )} + end + end + + def mutate_global(ddlx, rules, count \\ 0) + + def mutate_global( + 
%SatPerms.DDLX{grants: [], revokes: [], assigns: [], unassigns: []}, + rules, + count + ) do + {rules, count} + end + + def mutate_global(%SatPerms.DDLX{} = ddlx, rules, count) do + {apply_ddlx(rules, ddlx, count == 0), count + count_changes(ddlx)} + end + + def role_match?(role1, role2) do + role1.assign_id == role2.assign_id && role1.row_id == role2.row_id + end + + defp load_roles(perms) do + %{id: id, roles: role_list, rules: %{id: rules_id}} = perms + + %SatPerms.Roles{ + parent_id: id, + rules_id: rules_id, + roles: role_list + } + end + + defp gc_roles(perms, roles) do + valid_assigns = MapSet.new(perms.rules.assigns, & &1.id) + + %{roles | roles: Enum.filter(roles.roles, &MapSet.member?(valid_assigns, &1.assign_id))} + end + + @doc """ + the `%SatPerms.DDLX{}` struct contains multiple instances of say a `%SatPerms.Grant{}` but these + multiple instances are the result of a single command (e.g. a `GRANT ALL...` will result in 4 + separate entries in the `grants` list but represent a single statement). + + Thus the order they are applied in a migration is preserved by the ordering of the arrival of + the DDLX structs through the replication stream. + + Since each struct's id is a fingerprint that acts as a primary key, we just need to operate on + the existing rules keyed by this id. + """ + @spec apply_ddlx(%SatPerms.Rules{}, %SatPerms.DDLX{}) :: %SatPerms.Rules{} + def apply_ddlx(rules, ddlx, is_first? \\ true) + + def apply_ddlx(%SatPerms.Rules{} = rules, %SatPerms.DDLX{} = ddlx, is_first?) do + rules + |> update_grants(ddlx.grants) + |> update_revokes(ddlx.revokes) + |> update_assigns(ddlx.assigns) + |> update_unassigns(ddlx.unassigns) + |> increment_id(is_first?) 
+ end + + defp update_grants(rules, grants) do + add_rules(rules, :grants, grants) + end + + defp update_revokes(rules, revokes) do + remove_rules(rules, :grants, revokes) + end + + defp update_assigns(rules, assigns) do + add_rules(rules, :assigns, assigns) + end + + defp update_unassigns(rules, unassigns) do + remove_rules(rules, :assigns, unassigns) + end + + defp add_rules(rules, key, updates) do + update_rules(rules, key, updates, fn update, existing -> + Map.put(existing, update.id, update) + end) + end + + defp remove_rules(rules, key, updates) do + update_rules(rules, key, updates, fn update, existing -> + Map.delete(existing, update.id) + end) + end + + defp update_rules(rules, key, updates, update_fun) do + Map.update!(rules, key, fn existing -> + existing = Map.new(existing, &{&1.id, &1}) + + # be absolutely sure that every permission struct has an id set + updates + |> Stream.map(&Command.put_id/1) + |> Enum.reduce(existing, update_fun) + |> Map.values() + end) + end + + defp increment_id(%{id: id} = rules, true) do + %{rules | id: id + 1, parent_id: id} + end + + defp increment_id(rules, false) do + rules + end + + defp count_changes(ddlx) do + [:grants, :revokes, :assigns, :unassigns] + |> Enum.reduce(0, fn key, count -> + count + length(Map.fetch!(ddlx, key)) + end) + end + + defp updated_user_permissions(user_id, permissions) do + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: user_id, + permissions: permissions + } + } + end + + defp updated_global_permissions(permissions) do + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{ + permissions_id: permissions.id + } + } + end +end diff --git a/components/electric/lib/electric/satellite/permissions/graph.ex b/components/electric/lib/electric/satellite/permissions/graph.ex index da56ec5d80..305f55ce4e 100644 --- a/components/electric/lib/electric/satellite/permissions/graph.ex 
+++ b/components/electric/lib/electric/satellite/permissions/graph.ex @@ -78,15 +78,6 @@ defmodule Electric.Satellite.Permissions.Graph do """ @callback parent(impl(), scope_root(), relation(), record()) :: {relation(), id()} | nil - @doc """ - Return the path through the tables' foreign keys that gets from the given relation to the root. - - If `relation` is the same as `root` then should return `[root]`. - - If there is no path from `relation` to `root`, returns `nil`. - """ - @callback relation_path(impl(), scope_root(), relation()) :: [relation(), ...] | nil - @behaviour __MODULE__ defguardp is_relation(r) when is_tuple(r) and tuple_size(r) == 2 @@ -250,13 +241,4 @@ defmodule Electric.Satellite.Permissions.Graph do def primary_key({module, state}, relation, record) do module.primary_key(state, relation, record) end - - @impl __MODULE__ - def relation_path(_impl, root, root) do - [root] - end - - def relation_path({module, state}, root, relation) do - module.relation_path(state, root, relation) - end end diff --git a/components/electric/lib/electric/satellite/permissions/role.ex b/components/electric/lib/electric/satellite/permissions/role.ex index bbba854d90..2b31a98f7f 100644 --- a/components/electric/lib/electric/satellite/permissions/role.ex +++ b/components/electric/lib/electric/satellite/permissions/role.ex @@ -33,7 +33,7 @@ defmodule Electric.Satellite.Permissions.Role do @spec new(%SatPerms.Role{} | predefined()) :: t() def new(%SatPerms.Role{} = role) do %__MODULE__{ - id: role.id, + id: role.row_id, role: role.role, user_id: role.user_id, assign_id: role.assign_id, diff --git a/components/electric/lib/electric/satellite/permissions/trigger.ex b/components/electric/lib/electric/satellite/permissions/trigger.ex index e4c928d89c..4ef754a992 100644 --- a/components/electric/lib/electric/satellite/permissions/trigger.ex +++ b/components/electric/lib/electric/satellite/permissions/trigger.ex @@ -1,107 +1,184 @@ defmodule 
Electric.Satellite.Permissions.Trigger do + alias Electric.Postgres.Extension.SchemaLoader alias Electric.Replication.Changes - alias Electric.Satellite.{Auth, SatPerms} - alias Electric.Satellite.Permissions - alias Electric.Satellite.Permissions.Graph - - @type assign_trigger_fun() :: - (Permissions.change(), Graph.impl(), Auth.t() -> [Permissions.Role.t()]) + alias Electric.Satellite.SatPerms + + @type role() :: %SatPerms.Role{} + @type role_event() :: + {:insert, new :: role()} + | {:update, old :: role(), new :: role()} + | {:delete, old :: role()} + @type callback_arg() :: term() + @type callback_result() :: {[term()], callback_arg()} + @type callback_fun() :: (role_event(), Changes.change(), callback_arg() -> callback_result()) + @type trigger_fun() :: (Changes.change(), callback_arg() -> callback_result()) + @type triggers() :: %{Electric.Postgres.relation() => trigger_fun()} + + @doc """ + Create a callback map for the given list of assignments. + + The callback map is a map of relation => function. + + The function expects to be called with two arguments: + + 1. The change struct from the logical replication stream + 2. Some user defined argument that will be passed to the final callback function + (`trigger_callback_function()`) + + The `trigger_callback_function()` is called with 3 arguments: + + 1. The role change event which is a map of the original change in the data to the resulting + change in role + 2. The original pg change event + 3. 
The second argument to the original callback + """ + @spec assign_triggers([%SatPerms.Assign{}], SchemaLoader.Version.t(), callback_fun()) :: + triggers() + def assign_triggers(assigns, schema_version, trigger_callback_fun) + when is_function(trigger_callback_fun, 3) do + assigns + |> Enum.map(&for_assign(&1, schema_version, trigger_callback_fun)) + |> Enum.group_by(&elem(&1, 0), &elem(&1, 1)) + end - @spec for_assign(%SatPerms.Assign{}) :: [{Permissions.relation(), assign_trigger_fun()}] - def for_assign(assign) do + @doc false + @spec for_assign(%SatPerms.Assign{}, SchemaLoader.Version.t(), callback_fun()) :: + {Electric.Postgres.relation(), trigger_fun()} + def for_assign(assign, schema_version, trigger_callback_fun) + when is_function(trigger_callback_fun, 3) do %{table: %{schema: schema, name: name}} = assign - [ - {{schema, name}, &role_for_assign(&1, &2, &3, assign)} - ] - end + relation = {schema, name} - defp role_for_assign(%Changes.NewRecord{} = insert, graph, auth, assign) do - scopes = role_scopes(insert, assign, graph) + {:ok, fks} = + case assign do + %{scope: %{schema: scope_schema, name: scope_table}} -> + SchemaLoader.Version.foreign_keys( + schema_version, + relation, + {scope_schema, scope_table} + ) - for {role, id} <- build_roles(insert, graph, auth, assign, scopes) do - {:insert, {insert.relation, id}, role} - end + %{scope: nil} -> + {:ok, []} + end + + {:ok, pks} = SchemaLoader.Version.primary_keys(schema_version, relation) + + assign_data = + assign + |> Map.from_struct() + |> Map.put( + :watch_columns, + Enum.reject(fks ++ [assign.user_column, assign.role_column], &is_nil/1) + ) + + { + relation, + &change_trigger(&1, &2, assign_data, pks, fks, trigger_callback_fun) + } end - defp role_for_assign(%Changes.UpdatedRecord{} = update, graph, auth, assign) do - scopes = role_scopes(update, assign, graph) + @doc """ + Apply the triggers to the given change. + + The `fallback` function is called when no trigger exists for the given relation. 
+ """ + @spec apply(Changes.change(), triggers(), callback_arg(), trigger_fun()) :: callback_result() + def apply(change, triggers, callback_arg, fallback \\ &passthrough_trigger/2) + + def apply(%{relation: relation} = change, triggers, callback_arg, fallback) do + # TODO: although this claims to support multiple triggers per relation, in reality + # if we were to have that it would be difficult to manage which of the triggers + # passes on the change data itself + # Perhaps this should be re-written to pass on the change plus any supplemental stream + # elements... + triggers + |> Map.get(relation, [fallback]) + |> Enum.flat_map_reduce(callback_arg, fn trigger_fun, arg -> + trigger_fun.(change, arg) + end) + end - - for {role, id} <- build_roles(update, graph, auth, assign, scopes) do - {:update, {update.relation, id}, role} - end + # just pass through changes with no relation + def apply(change, _triggers, callback_arg, _fallback) do + {[change], callback_arg} end - defp role_for_assign(%Changes.DeletedRecord{} = delete, graph, auth, assign) do - # for deletes we need to know about the upstream graph because the local graph will already - # have the record as deleted, so we won't get scope information - upstream_graph = Electric.Satellite.Permissions.WriteBuffer.upstream_graph(graph) - scopes = role_scopes(delete, assign, upstream_graph) - - id = Graph.primary_key(graph, delete.relation, delete.old_record) - - # include a force delete for any roles in the buffer plus a delete for - # any roles in the underlying shape data - [ - {:delete, {delete.relation, id}} - | for( - {role, id} <- build_roles(delete, graph, auth, assign, scopes), - do: {:delete, {delete.relation, id}, role} - ) - ] + defp passthrough_trigger(change, arg) do + {[change], arg} end - defp build_roles(change, graph, auth, assign, scopes) do - record = - case change do - %Changes.DeletedRecord{old_record: record} -> record - %{record: record} -> record - end + defp change_trigger(%Changes.NewRecord{} = 
change, loader, assign, pks, fks, callback_fun) do + %{record: record} = change + + role = role(record, assign, pks, fks) + + callback_fun.({:insert, role}, change, loader) + end - %{user_id: user_id} = auth - %{user_column: user_column} = assign - - with ^user_id <- Map.get(record, user_column, nil), - role_name = role_name(record, assign) do - id = Graph.primary_key(graph, change.relation, record) - - Enum.map(scopes, fn scope -> - {%Permissions.Role{ - id: id, - role: role_name, - user_id: user_id, - assign_id: assign.id, - scope: scope - }, id} - end) + defp change_trigger(%Changes.UpdatedRecord{} = change, loader, assign, pks, fks, callback_fun) do + %{old_record: old, record: new, changed_columns: changed_columns} = change + + if MapSet.size(changed_columns) == 0 do + {[change], loader} else - _ -> [] + # if the role has been detached, e.g. by a fk with delete action "SET NULL" or the role value has + # been nulled, then delete the role + role_nulled? = + assign.watch_columns + |> Stream.filter(&MapSet.member?(changed_columns, &1)) + |> Stream.map(&Map.fetch!(new, &1)) + |> Enum.any?(&is_nil/1) + + if role_nulled? 
do + old_role = role(old, assign, pks, fks) + + callback_fun.({:delete, old_role}, change, loader) + else + old_role = role(old, assign, pks, fks) + new_role = role(new, assign, pks, fks) + + callback_fun.({:update, old_role, new_role}, change, loader) + end end end - defp role_name(record, assign) do - case assign do - %{role_name: role_name, role_column: column} - when role_name in [nil, ""] and is_binary(column) -> - Map.fetch!(record, column) + defp change_trigger(%Changes.DeletedRecord{} = change, loader, assign, pks, fks, callback_fun) do + %{old_record: record} = change - %{role_name: name, role_column: role_column} - when role_column in [nil, ""] and is_binary(name) -> - name - end + role = role(record, assign, pks, fks) + + callback_fun.({:delete, role}, change, loader) end - defp role_scopes(change, assign, graph) do - case assign do - %{scope: nil} -> - [nil] + defp role(record, assign, pks, fks) do + %SatPerms.Role{ + row_id: Enum.map(pks, &Map.fetch!(record, &1)), + role: role_name(record, assign), + user_id: Map.fetch!(record, assign.user_column), + assign_id: assign.id, + scope: role_scope(fks, record, assign) + } + end - %{scope: %{schema: schema, name: name}} -> - root = {schema, name} + defp role_name(_record, %{role_column: nil, role_name: role_name}) when is_binary(role_name) do + role_name + end - graph - |> Graph.scope_id(root, change) - |> Enum.map(fn {id, _} -> {root, id} end) - end + defp role_name(record, %{role_column: role_column}) when is_binary(role_column) do + Map.fetch!(record, role_column) + end + + defp role_scope(_fks, _record, %{scope: nil}) do + nil + end + + defp role_scope(fks, record, %{scope: %{schema: sname, name: tname}}) do + %SatPerms.Scope{table: role_table(sname, tname), id: Enum.map(fks, &Map.fetch!(record, &1))} + end + + defp role_table(schema, name) do + %SatPerms.Table{schema: schema, name: name} end end diff --git a/components/electric/lib/electric/satellite/permissions/write_buffer.ex 
b/components/electric/lib/electric/satellite/permissions/write_buffer.ex index a35abf11a4..180a6f9148 100644 --- a/components/electric/lib/electric/satellite/permissions/write_buffer.ex +++ b/components/electric/lib/electric/satellite/permissions/write_buffer.ex @@ -107,22 +107,26 @@ defmodule Electric.Satellite.Permissions.WriteBuffer do {__MODULE__, state(state, roles: roles, deleted_roles: deleted, role_grants: role_grants)} end - defp update_intermediate_role({:insert, {relation, id}, role}, {roles, deleted}) do + defp update_intermediate_role({:insert, role}, {roles, deleted}) do + key = role_key(role) + { - Map.put(roles, {relation, id}, role), - MapSet.delete(deleted, role_key(role)) + Map.put(roles, key, role), + MapSet.delete(deleted, key) } end - defp update_intermediate_role({:update, {relation, id}, role}, {roles, deleted}) do - {Map.put(roles, {relation, id}, role), deleted} + defp update_intermediate_role({:update, role}, {roles, deleted}) do + {Map.put(roles, role_key(role), role), deleted} end - defp update_intermediate_role({:delete, {relation, id}, role}, {roles, deleted}) do - case Map.pop(roles, {relation, id}) do + defp update_intermediate_role({:delete, role}, {roles, deleted}) do + key = role_key(role) + + case Map.pop(roles, key) do {nil, roles} -> # deleting a role that we haven't just written - {roles, MapSet.put(deleted, role_key(role))} + {roles, MapSet.put(deleted, key)} {%{}, roles} -> {roles, deleted} @@ -134,7 +138,7 @@ defmodule Electric.Satellite.Permissions.WriteBuffer do end defp role_key(role) do - {role.scope, role.assign_id, role.user_id, role.role} + {role.assign_id, role.id} end @moduledoc """ @@ -258,9 +262,4 @@ defmodule Electric.Satellite.Permissions.WriteBuffer do def modified_fks(state(upstream: upstream), root, update) do Permissions.Graph.modified_fks(upstream, root, update) end - - @impl Permissions.Graph - def relation_path(state(upstream: upstream), root, relation) do - Permissions.Graph.relation_path(upstream, 
root, relation) - end end diff --git a/components/electric/lib/electric/satellite/protobuf_messages.ex b/components/electric/lib/electric/satellite/protobuf_messages.ex index d54fa0da03..77155bb611 100644 --- a/components/electric/lib/electric/satellite/protobuf_messages.ex +++ b/components/electric/lib/electric/satellite/protobuf_messages.ex @@ -3595,7 +3595,7 @@ end, defmodule Electric.Satellite.SatPerms.Revoke do @moduledoc false - defstruct table: nil, role: nil, privilege: :DELETE, scope: nil, path: nil + defstruct id: "", table: nil, role: nil, privilege: :DELETE, scope: nil, path: nil ( ( @@ -3613,6 +3613,7 @@ [] |> encode_scope(msg) |> encode_path(msg) + |> encode_id(msg) |> encode_table(msg) |> encode_role(msg) |> encode_privilege(msg) @@ -3622,6 +3623,18 @@ [] [ + defp encode_id(acc, msg) do + try do + if msg.id == "" do + acc + else + [acc, "\n", Protox.Encode.encode_string(msg.id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ + end + end, defp encode_table(acc, msg) do try do if msg.table == nil do @@ -3723,6 +3736,11 @@ {0, _, _} -> raise %Protox.IllegalTagError{} + {1, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[id: Protox.Decode.validate_string(delimited)], rest} + {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) @@ -3846,6 +3864,7 @@ } def defs() do %{ + 1 => {:id, {:scalar, ""}, :string}, 2 => {:table, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}}, 3 => {:role, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.RoleName}}, 4 => {:privilege, {:scalar, :DELETE}, {:enum, Electric.Satellite.SatPerms.Privilege}}, @@ -3860,6 +3879,7 @@ } def defs_by_name() do %{ + id: {1, {:scalar, ""}, :string}, path: {7, {:oneof, :_path}, {:message, Electric.Satellite.SatPerms.Path}}, privilege: {4, {:scalar, :DELETE}, {:enum, 
Electric.Satellite.SatPerms.Privilege}}, role: {3, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.RoleName}}, @@ -3873,6 +3893,15 @@ @spec fields_defs() :: list(Protox.Field.t()) def fields_defs() do [ + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }, %{ __struct__: Protox.Field, json_name: "table", @@ -3923,6 +3952,35 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), + ( + def field_def(:id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }} + end + + def field_def("id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }} + end + + [] + ), ( def field_def(:table) do {:ok, @@ -4092,6 +4150,9 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), + def default(:id) do + {:ok, ""} + end, def default(:table) do {:ok, nil} end, @@ -5780,7 +5841,7 @@ end, defmodule Electric.Satellite.SatPerms.Unassign do @moduledoc false - defstruct table: nil, user_column: nil, role_column: nil, role_name: nil, scope: nil + defstruct id: "", table: nil, user_column: nil, role_column: nil, role_name: nil, scope: nil ( ( @@ -5800,6 +5861,7 @@ |> encode_role_column(msg) |> encode_role_name(msg) |> encode_scope(msg) + |> encode_id(msg) |> encode_table(msg) end ) @@ -5807,6 +5869,18 @@ [] [ + defp encode_id(acc, msg) do + try do + if msg.id == "" do + acc + else + [acc, "\n", Protox.Encode.encode_string(msg.id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ + end + end, defp encode_table(acc, msg) do try do if msg.table == nil do @@ -5902,6 +5976,11 @@ {0, _, _} -> raise %Protox.IllegalTagError{} + {1, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + 
{delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[id: Protox.Decode.validate_string(delimited)], rest} + {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) @@ -6004,6 +6083,7 @@ } def defs() do %{ + 1 => {:id, {:scalar, ""}, :string}, 2 => {:table, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}}, 3 => {:user_column, {:oneof, :_user_column}, :string}, 4 => {:role_column, {:oneof, :_role_column}, :string}, @@ -6018,6 +6098,7 @@ } def defs_by_name() do %{ + id: {1, {:scalar, ""}, :string}, role_column: {4, {:oneof, :_role_column}, :string}, role_name: {5, {:oneof, :_role_name}, :string}, scope: {6, {:oneof, :_scope}, {:message, Electric.Satellite.SatPerms.Table}}, @@ -6031,6 +6112,15 @@ @spec fields_defs() :: list(Protox.Field.t()) def fields_defs() do [ + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }, %{ __struct__: Protox.Field, json_name: "table", @@ -6081,6 +6171,35 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), + ( + def field_def(:id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }} + end + + def field_def("id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, ""}, + label: :optional, + name: :id, + tag: 1, + type: :string + }} + end + + [] + ), ( def field_def(:table) do {:ok, @@ -6283,6 +6402,9 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), + def default(:id) do + {:ok, ""} + end, def default(:table) do {:ok, nil} end, @@ -9285,7 +9407,7 @@ end, defmodule Electric.Satellite.SatPerms.Roles do @moduledoc false - defstruct roles: [] + defstruct id: 0, parent_id: nil, rules_id: 0, roles: [] ( ( @@ -9300,13 +9422,52 @@ @spec encode!(struct) :: iodata | no_return def 
encode!(msg) do - [] |> encode_roles(msg) + [] + |> encode_parent_id(msg) + |> encode_id(msg) + |> encode_rules_id(msg) + |> encode_roles(msg) end ) [] [ + defp encode_id(acc, msg) do + try do + if msg.id == 0 do + acc + else + [acc, "\b", Protox.Encode.encode_uint64(msg.id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_parent_id(acc, msg) do + try do + case msg.parent_id do + nil -> [acc] + child_field_value -> [acc, "\x10", Protox.Encode.encode_uint64(child_field_value)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:parent_id, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_rules_id(acc, msg) do + try do + if msg.rules_id == 0 do + acc + else + [acc, "\x18", Protox.Encode.encode_uint64(msg.rules_id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:rules_id, "invalid field value"), __STACKTRACE__ + end + end, defp encode_roles(acc, msg) do try do case msg.roles do @@ -9317,7 +9478,7 @@ [ acc, Enum.reduce(values, [], fn value, acc -> - [acc, "\x12", Protox.Encode.encode_message(value)] + [acc, "\"", Protox.Encode.encode_message(value)] end) ] end @@ -9363,7 +9524,19 @@ {0, _, _} -> raise %Protox.IllegalTagError{} + {1, _, bytes} -> + {value, rest} = Protox.Decode.parse_uint64(bytes) + {[id: value], rest} + {2, _, bytes} -> + {value, rest} = Protox.Decode.parse_uint64(bytes) + {[parent_id: value], rest} + + {3, _, bytes} -> + {value, rest} = Protox.Decode.parse_uint64(bytes) + {[rules_id: value], rest} + + {4, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) @@ -9426,7 +9599,12 @@ required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()} } def defs() do - %{2 => {:roles, :unpacked, {:message, Electric.Satellite.SatPerms.Role}}} + %{ + 1 => {:id, {:scalar, 0}, :uint64}, + 2 => {:parent_id, {:oneof, :_parent_id}, :uint64}, + 
3 => {:rules_id, {:scalar, 0}, :uint64}, + 4 => {:roles, :unpacked, {:message, Electric.Satellite.SatPerms.Role}} + } end @deprecated "Use fields_defs()/0 instead" @@ -9434,7 +9612,12 @@ required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()} } def defs_by_name() do - %{roles: {2, :unpacked, {:message, Electric.Satellite.SatPerms.Role}}} + %{ + id: {1, {:scalar, 0}, :uint64}, + parent_id: {2, {:oneof, :_parent_id}, :uint64}, + roles: {4, :unpacked, {:message, Electric.Satellite.SatPerms.Role}}, + rules_id: {3, {:scalar, 0}, :uint64} + } end ) @@ -9442,13 +9625,40 @@ @spec fields_defs() :: list(Protox.Field.t()) def fields_defs() do [ + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :uint64 + }, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }, + %{ + __struct__: Protox.Field, + json_name: "rulesId", + kind: {:scalar, 0}, + label: :optional, + name: :rules_id, + tag: 3, + type: :uint64 + }, %{ __struct__: Protox.Field, json_name: "roles", kind: :unpacked, label: :repeated, name: :roles, - tag: 2, + tag: 4, type: {:message, Electric.Satellite.SatPerms.Role} } ] @@ -9457,58 +9667,176 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:roles) do + def field_def(:id) do {:ok, %{ __struct__: Protox.Field, - json_name: "roles", - kind: :unpacked, - label: :repeated, - name: :roles, - tag: 2, - type: {:message, Electric.Satellite.SatPerms.Role} + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :uint64 }} end - def field_def("roles") do + def field_def("id") do {:ok, %{ __struct__: Protox.Field, - json_name: "roles", - kind: :unpacked, - label: :repeated, - name: :roles, - tag: 2, - type: {:message, Electric.Satellite.SatPerms.Role} + json_name: "id", + kind: {:scalar, 0}, 
+ label: :optional, + name: :id, + tag: 1, + type: :uint64 }} end [] ), - def field_def(_) do - {:error, :no_such_field} - end - ] - ) - - [] - - ( - @spec required_fields() :: [] - def required_fields() do - [] - end - ) + ( + def field_def(:parent_id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end - ( - @spec syntax() :: atom() - def syntax() do - :proto3 - end - ) + def field_def("parentId") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end - [ + def field_def("parent_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end + ), + ( + def field_def(:rules_id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "rulesId", + kind: {:scalar, 0}, + label: :optional, + name: :rules_id, + tag: 3, + type: :uint64 + }} + end + + def field_def("rulesId") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "rulesId", + kind: {:scalar, 0}, + label: :optional, + name: :rules_id, + tag: 3, + type: :uint64 + }} + end + + def field_def("rules_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "rulesId", + kind: {:scalar, 0}, + label: :optional, + name: :rules_id, + tag: 3, + type: :uint64 + }} + end + ), + ( + def field_def(:roles) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "roles", + kind: :unpacked, + label: :repeated, + name: :roles, + tag: 4, + type: {:message, Electric.Satellite.SatPerms.Role} + }} + end + + def field_def("roles") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "roles", + kind: :unpacked, + label: :repeated, + name: :roles, + tag: 4, + type: {:message, Electric.Satellite.SatPerms.Role} + }} + end + + [] + ), + def 
field_def(_) do + {:error, :no_such_field} + end + ] + ) + + [] + + ( + @spec required_fields() :: [] + def required_fields() do + [] + end + ) + + ( + @spec syntax() :: atom() + def syntax() do + :proto3 + end + ) + + [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), + def default(:id) do + {:ok, 0} + end, + def default(:parent_id) do + {:error, :no_default_value} + end, + def default(:rules_id) do + {:ok, 0} + end, def default(:roles) do {:error, :no_default_value} end, @@ -11743,7 +12071,7 @@ end, defmodule Electric.Satellite.SatPerms do @moduledoc false - defstruct id: 0, rules: nil, roles: nil + defstruct id: 0, user_id: "", rules: nil, roles: [] ( ( @@ -11758,7 +12086,7 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_id(msg) |> encode_rules(msg) |> encode_roles(msg) + [] |> encode_id(msg) |> encode_user_id(msg) |> encode_rules(msg) |> encode_roles(msg) end ) @@ -11777,6 +12105,18 @@ reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ end end, + defp encode_user_id(acc, msg) do + try do + if msg.user_id == "" do + acc + else + [acc, "\x12", Protox.Encode.encode_string(msg.user_id)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:user_id, "invalid field value"), __STACKTRACE__ + end + end, defp encode_rules(acc, msg) do try do if msg.rules == nil do @@ -11791,10 +12131,17 @@ end, defp encode_roles(acc, msg) do try do - if msg.roles == nil do - acc - else - [acc, "\"", Protox.Encode.encode_message(msg.roles)] + case msg.roles do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\"", Protox.Encode.encode_message(value)] + end) + ] end rescue ArgumentError -> @@ -11842,6 +12189,11 @@ {value, rest} = Protox.Decode.parse_int64(bytes) {[id: value], rest} + {2, _, bytes} -> + {len, bytes} = Protox.Varint.decode(bytes) + {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) + {[user_id: 
Protox.Decode.validate_string(delimited)], rest} + {3, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) @@ -11858,13 +12210,8 @@ {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[ - roles: - Protox.MergeMessage.merge( - msg.roles, - Electric.Satellite.SatPerms.Roles.decode!(delimited) - ) - ], rest} + {[roles: msg.roles ++ [Electric.Satellite.SatPerms.Role.decode!(delimited)]], + rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest) @@ -11924,8 +12271,9 @@ def defs() do %{ 1 => {:id, {:scalar, 0}, :int64}, + 2 => {:user_id, {:scalar, ""}, :string}, 3 => {:rules, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Rules}}, - 4 => {:roles, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Roles}} + 4 => {:roles, :unpacked, {:message, Electric.Satellite.SatPerms.Role}} } end @@ -11936,8 +12284,9 @@ def defs_by_name() do %{ id: {1, {:scalar, 0}, :int64}, - roles: {4, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Roles}}, - rules: {3, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Rules}} + roles: {4, :unpacked, {:message, Electric.Satellite.SatPerms.Role}}, + rules: {3, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Rules}}, + user_id: {2, {:scalar, ""}, :string} } end ) @@ -11955,6 +12304,15 @@ tag: 1, type: :int64 }, + %{ + __struct__: Protox.Field, + json_name: "userId", + kind: {:scalar, ""}, + label: :optional, + name: :user_id, + tag: 2, + type: :string + }, %{ __struct__: Protox.Field, json_name: "rules", @@ -11967,11 +12325,11 @@ %{ __struct__: Protox.Field, json_name: "roles", - kind: {:scalar, nil}, - label: :optional, + kind: :unpacked, + label: :repeated, name: :roles, tag: 4, - type: {:message, Electric.Satellite.SatPerms.Roles} + type: {:message, Electric.Satellite.SatPerms.Role} } ] end @@ -12007,6 +12365,46 @@ [] ), + ( + def field_def(:user_id) do + {:ok, 
+ %{ + __struct__: Protox.Field, + json_name: "userId", + kind: {:scalar, ""}, + label: :optional, + name: :user_id, + tag: 2, + type: :string + }} + end + + def field_def("userId") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "userId", + kind: {:scalar, ""}, + label: :optional, + name: :user_id, + tag: 2, + type: :string + }} + end + + def field_def("user_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "userId", + kind: {:scalar, ""}, + label: :optional, + name: :user_id, + tag: 2, + type: :string + }} + end + ), ( def field_def(:rules) do {:ok, @@ -12042,11 +12440,11 @@ %{ __struct__: Protox.Field, json_name: "roles", - kind: {:scalar, nil}, - label: :optional, + kind: :unpacked, + label: :repeated, name: :roles, tag: 4, - type: {:message, Electric.Satellite.SatPerms.Roles} + type: {:message, Electric.Satellite.SatPerms.Role} }} end @@ -12055,11 +12453,11 @@ %{ __struct__: Protox.Field, json_name: "roles", - kind: {:scalar, nil}, - label: :optional, + kind: :unpacked, + label: :repeated, name: :roles, tag: 4, - type: {:message, Electric.Satellite.SatPerms.Roles} + type: {:message, Electric.Satellite.SatPerms.Role} }} end @@ -12092,11 +12490,14 @@ def default(:id) do {:ok, 0} end, + def default(:user_id) do + {:ok, ""} + end, def default(:rules) do {:ok, nil} end, def default(:roles) do - {:ok, nil} + {:error, :no_default_value} end, def default(_) do {:error, :no_such_field} @@ -14546,7 +14947,7 @@ end, defmodule Electric.Satellite.SatPerms.Rules do @moduledoc false - defstruct grants: [], assigns: [] + defstruct id: 0, parent_id: nil, grants: [], assigns: [] ( ( @@ -14561,13 +14962,40 @@ @spec encode!(struct) :: iodata | no_return def encode!(msg) do - [] |> encode_grants(msg) |> encode_assigns(msg) + [] + |> encode_parent_id(msg) + |> encode_id(msg) + |> encode_grants(msg) + |> encode_assigns(msg) end ) [] [ + defp encode_id(acc, msg) do + try do + if msg.id == 0 do + acc + else + [acc, "\b", Protox.Encode.encode_uint64(msg.id)] + end 
+ rescue + ArgumentError -> + reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ + end + end, + defp encode_parent_id(acc, msg) do + try do + case msg.parent_id do + nil -> [acc] + child_field_value -> [acc, "\x10", Protox.Encode.encode_uint64(child_field_value)] + end + rescue + ArgumentError -> + reraise Protox.EncodingError.new(:parent_id, "invalid field value"), __STACKTRACE__ + end + end, defp encode_grants(acc, msg) do try do case msg.grants do @@ -14578,7 +15006,7 @@ [ acc, Enum.reduce(values, [], fn value, acc -> - [acc, "\n", Protox.Encode.encode_message(value)] + [acc, "\x1A", Protox.Encode.encode_message(value)] end) ] end @@ -14597,7 +15025,7 @@ [ acc, Enum.reduce(values, [], fn value, acc -> - [acc, "\x12", Protox.Encode.encode_message(value)] + [acc, "\"", Protox.Encode.encode_message(value)] end) ] end @@ -14644,13 +15072,21 @@ raise %Protox.IllegalTagError{} {1, _, bytes} -> + {value, rest} = Protox.Decode.parse_uint64(bytes) + {[id: value], rest} + + {2, _, bytes} -> + {value, rest} = Protox.Decode.parse_uint64(bytes) + {[parent_id: value], rest} + + {3, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) {[grants: msg.grants ++ [Electric.Satellite.SatPerms.Grant.decode!(delimited)]], rest} - {2, _, bytes} -> + {4, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) @@ -14715,8 +15151,10 @@ } def defs() do %{ - 1 => {:grants, :unpacked, {:message, Electric.Satellite.SatPerms.Grant}}, - 2 => {:assigns, :unpacked, {:message, Electric.Satellite.SatPerms.Assign}} + 1 => {:id, {:scalar, 0}, :uint64}, + 2 => {:parent_id, {:oneof, :_parent_id}, :uint64}, + 3 => {:grants, :unpacked, {:message, Electric.Satellite.SatPerms.Grant}}, + 4 => {:assigns, :unpacked, {:message, Electric.Satellite.SatPerms.Assign}} } end @@ -14726,8 +15164,10 @@ } def defs_by_name() do %{ - assigns: {2, :unpacked, {:message, 
Electric.Satellite.SatPerms.Assign}}, - grants: {1, :unpacked, {:message, Electric.Satellite.SatPerms.Grant}} + assigns: {4, :unpacked, {:message, Electric.Satellite.SatPerms.Assign}}, + grants: {3, :unpacked, {:message, Electric.Satellite.SatPerms.Grant}}, + id: {1, {:scalar, 0}, :uint64}, + parent_id: {2, {:oneof, :_parent_id}, :uint64} } end ) @@ -14736,13 +15176,31 @@ @spec fields_defs() :: list(Protox.Field.t()) def fields_defs() do [ + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :uint64 + }, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }, %{ __struct__: Protox.Field, json_name: "grants", kind: :unpacked, label: :repeated, name: :grants, - tag: 1, + tag: 3, type: {:message, Electric.Satellite.SatPerms.Grant} }, %{ @@ -14751,7 +15209,7 @@ kind: :unpacked, label: :repeated, name: :assigns, - tag: 2, + tag: 4, type: {:message, Electric.Satellite.SatPerms.Assign} } ] @@ -14759,6 +15217,75 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), + ( + def field_def(:id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :uint64 + }} + end + + def field_def("id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "id", + kind: {:scalar, 0}, + label: :optional, + name: :id, + tag: 1, + type: :uint64 + }} + end + + [] + ), + ( + def field_def(:parent_id) do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end + + def field_def("parentId") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end + + def 
field_def("parent_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "parentId", + kind: {:oneof, :_parent_id}, + label: :proto3_optional, + name: :parent_id, + tag: 2, + type: :uint64 + }} + end + ), ( def field_def(:grants) do {:ok, @@ -14768,7 +15295,7 @@ kind: :unpacked, label: :repeated, name: :grants, - tag: 1, + tag: 3, type: {:message, Electric.Satellite.SatPerms.Grant} }} end @@ -14781,7 +15308,7 @@ kind: :unpacked, label: :repeated, name: :grants, - tag: 1, + tag: 3, type: {:message, Electric.Satellite.SatPerms.Grant} }} end @@ -14797,7 +15324,7 @@ kind: :unpacked, label: :repeated, name: :assigns, - tag: 2, + tag: 4, type: {:message, Electric.Satellite.SatPerms.Assign} }} end @@ -14810,7 +15337,7 @@ kind: :unpacked, label: :repeated, name: :assigns, - tag: 2, + tag: 4, type: {:message, Electric.Satellite.SatPerms.Assign} }} end @@ -14841,6 +15368,12 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), + def default(:id) do + {:ok, 0} + end, + def default(:parent_id) do + {:error, :no_default_value} + end, def default(:grants) do {:error, :no_default_value} end, @@ -15174,7 +15707,7 @@ end, defmodule Electric.Satellite.SatPerms.Scope do @moduledoc false - defstruct table: nil, id: "" + defstruct table: nil, id: [] ( ( @@ -15210,10 +15743,17 @@ end, defp encode_id(acc, msg) do try do - if msg.id == "" do - acc - else - [acc, "\x12", Protox.Encode.encode_string(msg.id)] + case msg.id do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\x12", Protox.Encode.encode_string(value)] + end) + ] end rescue ArgumentError -> @@ -15272,7 +15812,7 @@ {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[id: Protox.Decode.validate_string(delimited)], rest} + {[id: msg.id ++ [Protox.Decode.validate_string(delimited)]], rest} {tag, wire_type, rest} -> {_, rest} = Protox.Decode.parse_unknown(tag, wire_type, 
rest) @@ -15332,7 +15872,7 @@ def defs() do %{ 1 => {:table, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}}, - 2 => {:id, {:scalar, ""}, :string} + 2 => {:id, :unpacked, :string} } end @@ -15342,7 +15882,7 @@ } def defs_by_name() do %{ - id: {2, {:scalar, ""}, :string}, + id: {2, :unpacked, :string}, table: {1, {:scalar, nil}, {:message, Electric.Satellite.SatPerms.Table}} } end @@ -15364,8 +15904,8 @@ %{ __struct__: Protox.Field, json_name: "id", - kind: {:scalar, ""}, - label: :optional, + kind: :unpacked, + label: :repeated, name: :id, tag: 2, type: :string @@ -15410,8 +15950,8 @@ %{ __struct__: Protox.Field, json_name: "id", - kind: {:scalar, ""}, - label: :optional, + kind: :unpacked, + label: :repeated, name: :id, tag: 2, type: :string @@ -15423,8 +15963,8 @@ %{ __struct__: Protox.Field, json_name: "id", - kind: {:scalar, ""}, - label: :optional, + kind: :unpacked, + label: :repeated, name: :id, tag: 2, type: :string @@ -15461,7 +16001,7 @@ {:ok, nil} end, def default(:id) do - {:ok, ""} + {:error, :no_default_value} end, def default(_) do {:error, :no_such_field} @@ -16777,7 +17317,7 @@ end, defmodule Electric.Satellite.SatPerms.Role do @moduledoc false - defstruct id: "", role: "", user_id: "", assign_id: "", scope: nil + defstruct row_id: [], role: "", user_id: "", assign_id: "", scope: nil ( ( @@ -16794,7 +17334,7 @@ def encode!(msg) do [] |> encode_scope(msg) - |> encode_id(msg) + |> encode_row_id(msg) |> encode_role(msg) |> encode_user_id(msg) |> encode_assign_id(msg) @@ -16804,16 +17344,23 @@ [] [ - defp encode_id(acc, msg) do + defp encode_row_id(acc, msg) do try do - if msg.id == "" do - acc - else - [acc, "\n", Protox.Encode.encode_string(msg.id)] + case msg.row_id do + [] -> + acc + + values -> + [ + acc, + Enum.reduce(values, [], fn value, acc -> + [acc, "\n", Protox.Encode.encode_string(value)] + end) + ] end rescue ArgumentError -> - reraise Protox.EncodingError.new(:id, "invalid field value"), __STACKTRACE__ + reraise 
Protox.EncodingError.new(:row_id, "invalid field value"), __STACKTRACE__ end end, defp encode_role(acc, msg) do @@ -16903,7 +17450,7 @@ {1, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) {delimited, rest} = Protox.Decode.parse_delimited(bytes, len) - {[id: Protox.Decode.validate_string(delimited)], rest} + {[row_id: msg.row_id ++ [Protox.Decode.validate_string(delimited)]], rest} {2, _, bytes} -> {len, bytes} = Protox.Varint.decode(bytes) @@ -16995,7 +17542,7 @@ } def defs() do %{ - 1 => {:id, {:scalar, ""}, :string}, + 1 => {:row_id, :unpacked, :string}, 2 => {:role, {:scalar, ""}, :string}, 3 => {:user_id, {:scalar, ""}, :string}, 4 => {:assign_id, {:scalar, ""}, :string}, @@ -17010,8 +17557,8 @@ def defs_by_name() do %{ assign_id: {4, {:scalar, ""}, :string}, - id: {1, {:scalar, ""}, :string}, role: {2, {:scalar, ""}, :string}, + row_id: {1, :unpacked, :string}, scope: {5, {:oneof, :_scope}, {:message, Electric.Satellite.SatPerms.Scope}}, user_id: {3, {:scalar, ""}, :string} } @@ -17024,10 +17571,10 @@ [ %{ __struct__: Protox.Field, - json_name: "id", - kind: {:scalar, ""}, - label: :optional, - name: :id, + json_name: "rowId", + kind: :unpacked, + label: :repeated, + name: :row_id, tag: 1, type: :string }, @@ -17073,33 +17620,44 @@ [ @spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}), ( - def field_def(:id) do + def field_def(:row_id) do {:ok, %{ __struct__: Protox.Field, - json_name: "id", - kind: {:scalar, ""}, - label: :optional, - name: :id, + json_name: "rowId", + kind: :unpacked, + label: :repeated, + name: :row_id, tag: 1, type: :string }} end - def field_def("id") do + def field_def("rowId") do {:ok, %{ __struct__: Protox.Field, - json_name: "id", - kind: {:scalar, ""}, - label: :optional, - name: :id, + json_name: "rowId", + kind: :unpacked, + label: :repeated, + name: :row_id, tag: 1, type: :string }} end - [] + def field_def("row_id") do + {:ok, + %{ + __struct__: Protox.Field, + json_name: "rowId", + kind: 
:unpacked, + label: :repeated, + name: :row_id, + tag: 1, + type: :string + }} + end ), ( def field_def(:role) do @@ -17263,8 +17821,8 @@ [ @spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}), - def default(:id) do - {:ok, ""} + def default(:row_id) do + {:error, :no_default_value} end, def default(:role) do {:ok, ""} diff --git a/components/electric/lib/electric/satellite/protocol.ex b/components/electric/lib/electric/satellite/protocol.ex index 6269b52b9b..b06fbc0de0 100644 --- a/components/electric/lib/electric/satellite/protocol.ex +++ b/components/electric/lib/electric/satellite/protocol.ex @@ -705,6 +705,8 @@ defmodule Electric.Satellite.Protocol do def handle_outgoing_txs(events, state, acc \\ []) def handle_outgoing_txs([{tx, offset} | events], %State{} = state, acc) do + {tx, state} = manage_permissions_changes(tx, state) + filtered_tx = tx |> maybe_strip_migration_ddl(state.out_rep.last_migration_xid_at_initial_sync) @@ -736,6 +738,33 @@ defmodule Electric.Satellite.Protocol do {Enum.reverse(acc), state} end + defp manage_permissions_changes(tx, state) do + %{auth: %{user_id: user_id}} = state + + {changes, state} = + Enum.flat_map_reduce( + tx.changes, + state, + fn + %Changes.UpdatedPermissions{type: :user, permissions: %{user_id: ^user_id}}, state -> + Logger.debug(fn -> "User permissions updated for connection" end) + {[], state} + + %Changes.UpdatedPermissions{type: :user}, state -> + {[], state} + + %Changes.UpdatedPermissions{type: :global}, state -> + Logger.debug(fn -> "Global permissions updated for connection" end) + {[], state} + + change, state -> + {[change], state} + end + ) + + {%{tx | changes: changes}, state} + end + # If the client received at least one migration during the initial sync, the value of # last_migration_xid_at_initial_sync is non-zero. 
And due to the lag between any changes getting committed to the # database and those same changes getting propagated through the cached WAL, we may be looking at the same migration diff --git a/components/electric/priv/sql_function_templates/ddlx/assign.sql.eex b/components/electric/priv/sql_function_templates/ddlx/assign.sql.eex deleted file mode 100644 index 5e6eb4fb42..0000000000 --- a/components/electric/priv/sql_function_templates/ddlx/assign.sql.eex +++ /dev/null @@ -1,399 +0,0 @@ -CREATE OR REPLACE PROCEDURE <%= schema() %>.assign( - assignment_id text, - assign_table_full_name text, - scope text, - user_column_name text, - role_name_string text, - role_column_name text, - if_fn text -) SECURITY DEFINER AS $$ - -DECLARE - assign_table TEXT; - assign_schema TEXT; - scope_table_not_null TEXT; - role_name_not_null TEXT; - role_column_not_null TEXT; - if_fn_not_null TEXT; - role_def TEXT; - user_column_type TEXT; - scope_key_count int; - user_key_count int; - scope_key RECORD; - user_key RECORD; - primary_key RECORD; - -BEGIN - - -- return types for the introspection of foreign keys - CREATE TEMP TABLE scope_fkeys - ( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ); - - CREATE TEMP TABLE user_fkeys - ( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] - ); - - CREATE TEMP TABLE pkeys - ( - columns name[10], - types information_schema.character_data[10] - ); - - SELECT n[1], n[2] INTO assign_schema, assign_table FROM parse_ident(assign_table_full_name) n; - - -- gets the columns and types for the assign_table's primary key - INSERT INTO pkeys SELECT * from <%= schema() %>.find_pk(assign_schema, assign_table); - SELECT * FROM pkeys LIMIT 1 INTO primary_key; - - - -- gets the foreign key pointing to the user - INSERT INTO 
user_fkeys SELECT * from <%= schema() %>.find_fk_for_column(assign_schema,assign_table, user_column_name); - SELECT COUNT(*) FROM user_fkeys INTO user_key_count; - - IF user_key_count = 0 THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'Could not find a foreign key pointing to the user table'; - END IF; - - SELECT * FROM user_fkeys LIMIT 1 INTO user_key; - - SELECT data_type FROM information_schema.columns - WHERE table_name = user_key.to_table and column_name = user_key.to_columns[1] - INTO user_column_type; - - -- sets some things to default strings if the function args are null - IF scope IS NULL THEN scope_table_not_null = '__none__'; ELSE scope_table_not_null = scope; END IF; - IF if_fn IS NULL THEN if_fn_not_null = 'true'; ELSE if_fn_not_null = if_fn; END IF; - - IF role_name_string IS NULL AND role_column_name IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'You must give either a role_name_string or a role_column_name'; - END IF; - - IF NOT role_name_string IS NULL AND NOT role_column_name IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'You must give either a role_name_string or a role_column_name but not both'; - END IF; - - -- assign_table_full_name = format('%s.%s', assign_schema, assign_table); - - IF role_name_string IS NULL THEN - role_name_not_null = '__none__'; - role_column_not_null = role_column_name; - role_def = format('NEW.%s', role_column_name); - ELSE - role_name_not_null = role_name_string; - role_column_not_null = '__none__'; - role_def = format(E'\'%s\'', role_name_string); - END IF; - - -- reads the foreign key for the scope if it exists - IF NOT scope IS NULL THEN - INSERT INTO scope_fkeys SELECT * from <%= schema() %>.find_fk_to_table(assign_schema,assign_table, scope); - SELECT COUNT(*) FROM scope_fkeys INTO scope_key_count; - - IF scope_key_count > 1 THEN - DROP TABLE scope_fkeys; - 
DROP TABLE user_fkeys; - DROP TABLE pkeys; - -- The assign_table is assumed to have a single foreign key pointing to the scope table - RAISE EXCEPTION 'Too many foreign keys for the scope table'; - END IF; - - IF scope_key_count = 0 THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - -- The assign_table is assumed to have a single foreign key pointing to the scope table - RAISE EXCEPTION 'Could not find a foreign key pointing to the scope table'; - END IF; - - SELECT * FROM scope_fkeys LIMIT 1 INTO scope_key; - - END IF; - - -- Creates the assignment itself. - INSERT INTO <%= assignments_table() %> (id, table_name, scope_table, user_column, role_name, role_column, if_fn) - VALUES (assignment_id, assign_table_full_name, scope_table_not_null, user_column_name, role_name_not_null, role_column_not_null, if_fn); - - if assignment_id IS NULL THEN - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; - RAISE EXCEPTION 'Could not create assignment'; - END IF; - - /* - Creates big fat join table. Every time the assignment rule is used and a user is given a role a row will be created - in both this join table and in the table electric.roles. This table serves as a polymorphic join between the roles - table and the different types of both scope table and assignment table, and handles clean up correctly via fk cascade on delete. - - This table have 4 or 5 foreign keys - - It has foreign keys with ON DELETE CASCADE pointing to: - - The assignment created above. This assignment is the rule that causes all the entries in this join to be created in owns them. - - The user that the role has been given too. - - The assignment table item that assigned the role. - - The row in the scope table if one is specified. - - So that any of these being deleted will remove the join. - - And it has a foreign key pointing to the role in electric.roles which it will delete with a trigger. 
- */ - - EXECUTE format('CREATE TABLE IF NOT EXISTS <%= schema() %>.assign_%s_join ( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - user_id %s, - assignment_id text, - role_id uuid, - FOREIGN KEY(role_id) - REFERENCES <%= roles_table() %> (id), - FOREIGN KEY(user_id) - REFERENCES %s.%s(%s) - ON DELETE CASCADE, - FOREIGN KEY(assignment_id) - REFERENCES <%= assignments_table() %> (id) - ON DELETE CASCADE - );', - assignment_id, - user_key.to_types[1], - user_key.to_schema, - user_key.to_table, - user_key.to_columns[1] - ); - - -- Adds a foreign key to the join table pointing to the assign_table - for counter in 1..ARRAY_LENGTH(primary_key.columns, 1) - loop - EXECUTE format('ALTER TABLE <%= schema() %>.assign_%s_join ADD COLUMN IF NOT EXISTS %s_%s %s;', - assignment_id, - assign_table, - primary_key.columns[counter], - primary_key.types[counter] - ); - end loop; - - EXECUTE format('ALTER TABLE <%= schema() %>.assign_%s_join - ADD CONSTRAINT electric_%s_join_%s_fk - FOREIGN KEY (%s_%s) - REFERENCES %s.%s(%s) - ON DELETE CASCADE;', - assignment_id, - assignment_id, - assign_table, - assign_table, - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - assign_schema, - assign_table, - ARRAY_TO_STRING(primary_key.columns, ', ') - ); - - -- defines insert and update trigger functions for the assign_table - -- when there is no scope - IF scope IS NULL THEN - - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema() %>.upsert_role_%1$s() RETURNS TRIGGER SECURITY DEFINER - AS $%2$s$ - DECLARE - role_key uuid; - join_key uuid; - BEGIN - - SELECT id, role_id FROM <%= schema() %>.assign_%1$s_join WHERE assignment_id = \'%4$s\' AND ( %5$s_%6$s ) = ( NEW.%7$s ) INTO join_key, role_key; - IF ( %8$s ) THEN - IF join_key IS NULL THEN - INSERT INTO <%= roles_table() %> (user_id, role) - VALUES (NEW.%9$s, %10$s) returning id INTO role_key; - INSERT INTO <%= schema() %>.assign_%1$s_join (user_id, %5$s_%6$s, role_id, assignment_id) - VALUES (NEW.%9$s, NEW.%7$s, 
role_key, \'%4$s\'); - ELSE - UPDATE <%= schema() %>.assign_%1$s_join SET user_id = NEW.%9$s - WHERE id = join_key; - UPDATE <%= roles_table() %> SET (user_id, role) = (NEW.%9$s, %10s) - WHERE id = role_key; - END IF; - ELSE - IF NOT join_key IS NULL THEN - DELETE FROM <%= schema() %>.assign_%1$s_join WHERE id = join_key; - END IF; - END IF; - RETURN NEW; - END; - $%2$s$ LANGUAGE plpgsql;', - --1 - assignment_id, - --2 - '', - --3 - '', - --4 - assignment_id, - --5 - assign_table, - --6 - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - --7 - ARRAY_TO_STRING(primary_key.columns, ', NEW.'), - --8 - if_fn_not_null, - --9 - user_key.from_columns[1], - --10 - role_def, - --11 - scope - ); - - -- and when there is a scope - ELSE - for counter in 1..ARRAY_LENGTH(scope_key.from_columns, 1) - loop - EXECUTE format('ALTER TABLE <%= schema() %>.assign_%s_join ADD COLUMN IF NOT EXISTS %s %s;', - assignment_id, - scope_key.from_columns[counter], - scope_key.to_types[counter] - ); - end loop; - - EXECUTE format('ALTER TABLE <%= schema() %>.assign_%s_join - ADD CONSTRAINT electric_%s_join_scope_fk - FOREIGN KEY (%s) - REFERENCES %s.%s(%s) - ON DELETE CASCADE;', - assignment_id, - assignment_id, - ARRAY_TO_STRING(scope_key.from_columns, ', '), - scope_key.to_schema, - scope_key.to_table, - ARRAY_TO_STRING(scope_key.to_columns, ', ') - ); - - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema() %>.upsert_role_%1$s() RETURNS TRIGGER SECURITY DEFINER - AS $%2$s$ - DECLARE - scope_key TEXT; - scope_list TEXT[]; - role_key uuid; - join_key uuid; - BEGIN - - scope_list := ARRAY[NEW.%3$s::text]; - scope_key := ARRAY_TO_STRING(scope_list, \', \' ); - - SELECT id, role_id FROM <%= schema() %>.assign_%1$s_join WHERE assignment_id = \'%4$s\' AND ( %5$s_%6$s ) = ( NEW.%7$s ) INTO join_key, role_key; - IF ( %8$s ) THEN - IF join_key IS NULL THEN - INSERT INTO <%= roles_table() %> (user_id, role, scope_table, scope_id) - VALUES (NEW.%9$s, %10$s, \'%11$s\', scope_key) 
returning id INTO role_key; - INSERT INTO <%= schema() %>.assign_%1$s_join (user_id, %12$s, %5$s_%6$s, role_id, assignment_id) - VALUES (NEW.%9$s, NEW.%13$s, NEW.%7$s, role_key, \'%4$s\'); - ELSE - UPDATE <%= schema() %>.assign_%1$s_join SET (user_id, %12$s) - = (NEW.%9$s, NEW.%13$s) WHERE id = join_key; - UPDATE <%= roles_table() %> SET (user_id, role, scope_table, scope_id) - = (NEW.%9$s, %10$s, \'%11$s\', scope_key) WHERE id = role_key; - END IF; - ELSE - IF NOT join_key IS NULL THEN - DELETE FROM <%= schema() %>.assign_%1$s_join WHERE id = join_key; - END IF; - END IF; - RETURN NEW; - END; - $%2$s$ LANGUAGE plpgsql;', - --1 - assignment_id, - --2 - '', - --3 - ARRAY_TO_STRING(scope_key.from_columns, '::text, NEW.'), - --4 - assignment_id, - --5 - assign_table, - --6 - ARRAY_TO_STRING(primary_key.columns, format(', %s_', assign_table)), - --7 - ARRAY_TO_STRING(primary_key.columns, ', NEW.'), - --8 - if_fn_not_null, - --9 - user_key.from_columns[1], - --10 - role_def, - --11 - scope, - --12 - ARRAY_TO_STRING(scope_key.from_columns, ', '), - --13 - ARRAY_TO_STRING(scope_key.from_columns, ', NEW.') - ); - END IF; - - -- adds a trigger to the join table that deletes the role itself - EXECUTE format(E'CREATE OR REPLACE FUNCTION <%= schema() %>.cleanup_role_%s() RETURNS TRIGGER SECURITY DEFINER - AS $%s$ - BEGIN - DELETE FROM <%= roles_table() %> WHERE id = OLD.role_id; - RETURN OLD; - END; - $%s$ LANGUAGE plpgsql;', - assignment_id, - '', - '' - ); - - EXECUTE format('CREATE OR REPLACE TRIGGER "electric_cleanup_role_%s" - AFTER DELETE ON <%= schema() %>."assign_%s_join" - FOR EACH ROW - EXECUTE FUNCTION <%= schema() %>."cleanup_role_%s"();', - assignment_id, - assignment_id, - assignment_id - ); - - -- adds the insert and update triggers functions to the assign_table - EXECUTE format('CREATE OR REPLACE TRIGGER "electric_insert_role_%s" - AFTER INSERT ON %I - FOR EACH ROW - EXECUTE FUNCTION <%= schema() %>."upsert_role_%s"();', - assignment_id, - assign_table, - 
assignment_id - ); - - EXECUTE format('CREATE OR REPLACE TRIGGER "electric_update_role_%s" - AFTER UPDATE ON %I - FOR EACH ROW - EXECUTE FUNCTION <%= schema() %>."upsert_role_%s"();', - assignment_id, - assign_table, - assignment_id - ); - DROP TABLE scope_fkeys; - DROP TABLE user_fkeys; - DROP TABLE pkeys; -END; -$$ LANGUAGE plpgsql; - diff --git a/components/electric/priv/sql_function_templates/ddlx/grant.sql.eex b/components/electric/priv/sql_function_templates/ddlx/grant.sql.eex deleted file mode 100644 index 0a539e67c3..0000000000 --- a/components/electric/priv/sql_function_templates/ddlx/grant.sql.eex +++ /dev/null @@ -1,24 +0,0 @@ -CREATE OR REPLACE PROCEDURE <%= schema() %>.grant( - privilege_name text, - on_table_name text, - role_name text, - columns text[], - scope_name text, - using_path text, - check_fn text -) SECURITY DEFINER AS $$ - - DECLARE - col TEXT; - - BEGIN - FOREACH col IN ARRAY columns - LOOP - INSERT INTO <%= grants_table() %> ( privilege, on_table, role , column_name, scope, using_path, check_fn) - VALUES (privilege_name, on_table_name, role_name, col, scope_name, using_path, check_fn) - ON CONFLICT ON CONSTRAINT grants_pkey DO UPDATE SET - (using_path, check_fn) = (EXCLUDED.using_path, EXCLUDED.check_fn); - END LOOP; - END; -$$ LANGUAGE plpgsql; - diff --git a/components/electric/priv/sql_function_templates/ddlx/unassign.sql.eex b/components/electric/priv/sql_function_templates/ddlx/unassign.sql.eex deleted file mode 100644 index 98f9966854..0000000000 --- a/components/electric/priv/sql_function_templates/ddlx/unassign.sql.eex +++ /dev/null @@ -1,45 +0,0 @@ -CREATE OR REPLACE PROCEDURE <%= schema() %>.unassign( - assignment_id text, - assign_table_full_name text, - scope text, - user_column_name text, - role_name_string text, - role_column_name text -) SECURITY DEFINER AS $$ -BEGIN - - -- remove triggers - EXECUTE format('DROP TRIGGER IF EXISTS "electric_cleanup_role_%s" ON <%= schema() %>."assign_%s_join" CASCADE;', - assignment_id, - 
assignment_id - ); - - EXECUTE format('DROP TRIGGER IF EXISTS "electric_insert_role_%s" ON %I CASCADE;', - assignment_id, - assign_table_full_name - ); - - EXECUTE format('DROP TRIGGER IF EXISTS "electric_update_role_%s" ON %I CASCADE;', - assignment_id, - assign_table_full_name - ); - - -- remove functions - EXECUTE format('DROP FUNCTION IF EXISTS <%= schema() %>."cleanup_role_%s" CASCADE;', - assignment_id - ); - - EXECUTE format('DROP FUNCTION IF EXISTS <%= schema() %>."upsert_role_%s" CASCADE;', - assignment_id - ); - - -- remove join table - EXECUTE format('DROP TABLE IF EXISTS <%= schema() %>."assign_%s_join" CASCADE;', - assignment_id - ); - - -- remove assignment - DELETE FROM <%= assignments_table() %> WHERE id = assignment_id; -END; -$$ LANGUAGE plpgsql; - diff --git a/components/electric/priv/sql_function_templates/find_fk_to_table.sql.eex b/components/electric/priv/sql_function_templates/find_fk_to_table.sql.eex deleted file mode 100644 index 577120e134..0000000000 --- a/components/electric/priv/sql_function_templates/find_fk_to_table.sql.eex +++ /dev/null @@ -1,47 +0,0 @@ -CREATE OR REPLACE FUNCTION <%= schema() %>.find_fk_to_table( - src_schema text, - src_table text, - dst_table text -) RETURNS TABLE ( - from_schema name, - from_table name, - from_columns name[10], - to_schema name, - to_table name, - to_columns name[10], - to_types information_schema.character_data[10] -) SECURITY DEFINER AS $$ - DECLARE - dst_schema name; - dst_name name; - BEGIN - -- dst_table is a quoted, fully qualified table, e.g. 
'"public"."assignments"' - SELECT n[1], n[2] INTO dst_schema, dst_name FROM parse_ident(dst_table) n; - - RETURN QUERY - SELECT sch.nspname AS "from_schema", - tbl.relname AS "from_table", - ARRAY_AGG(col.attname ORDER BY u.attposition) AS "from_columns", - f_sch.nspname AS "to_schema", - f_tbl.relname AS "to_table", - ARRAY_AGG(f_col.attname ORDER BY f_u.attposition) AS "to_columns", - ARRAY_AGG((SELECT data_type FROM information_schema.columns WHERE table_name = src_table and column_name = col.attname) ORDER BY f_u.attposition) AS "to_types" - FROM pg_constraint c - LEFT JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE - LEFT JOIN LATERAL UNNEST(c.confkey) WITH ORDINALITY AS f_u(attnum, attposition) ON f_u.attposition = u.attposition - JOIN pg_class tbl ON tbl.oid = c.conrelid - JOIN pg_namespace sch ON sch.oid = tbl.relnamespace - LEFT JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum) - LEFT JOIN pg_class f_tbl ON f_tbl.oid = c.confrelid - LEFT JOIN pg_namespace f_sch ON f_sch.oid = f_tbl.relnamespace - LEFT JOIN pg_attribute f_col ON (f_col.attrelid = f_tbl.oid AND f_col.attnum = f_u.attnum) - WHERE c.contype = 'f' - AND tbl.relname = src_table - AND f_tbl.relname = dst_name - AND sch.nspname = src_schema - AND f_sch.nspname = dst_schema - GROUP BY "from_schema", "from_table", "to_schema", "to_table" - ORDER BY "from_schema", "from_table"; - END; -$$ LANGUAGE plpgsql; - diff --git a/components/electric/src/electric_ddlx_parser.yrl b/components/electric/src/electric_ddlx_parser.yrl index a302a5768f..93e78a12bf 100644 --- a/components/electric/src/electric_ddlx_parser.yrl +++ b/components/electric/src/electric_ddlx_parser.yrl @@ -9,6 +9,8 @@ Nonterminals sqlite_stmt table_ident identifier + record + field_access scoped_role grant_scoped_role scope @@ -38,7 +40,7 @@ Terminals 'GRANT' 'ON' 'USING' 'SELECT' 'INSERT' 'UPDATE' 'DELETE' 'ALL' 'READ' 'WRITE' 'WHERE' 'REVOKE' 'FROM' 'SQLITE' 'AUTHENTICATED' 
'ANYONE' 'PRIVILEGES' - string int float + string integer float unquoted_identifier quoted_identifier '=' '>' '<' '<=' '>=' '!=' '<>' '+' '/' '*' '-' 'AND' 'IS' 'NOT' 'OR' @@ -100,6 +102,10 @@ table_ident -> identifier '.' identifier : [{table_schema, '$1'}, {table_name, ' identifier -> unquoted_identifier : unquoted_identifier('$1'). identifier -> quoted_identifier : unwrap('$1'). +%% upcase the record name, so e.g. it's always `AUTH.user_id`, `NEW.field_name` etc +record -> unquoted_identifier : 'Elixir.String':upcase(unwrap('$1')). +record -> quoted_identifier : 'Elixir.String':upcase(unwrap('$1')). + grant_scoped_role -> 'AUTHENTICATED' : [{role_name, 'AUTHENTICATED'}]. grant_scoped_role -> 'ANYONE' : [{role_name, 'ANYONE'}]. grant_scoped_role -> scoped_role : '$1'. @@ -128,6 +134,7 @@ if_expr -> '(' expr ')' : [{'if', erlang:iolist_to_binary('$2')}]. expr -> '(' expr ')' : ["(", '$2', ")"]. expr -> expr op expr : ['$1', " ", '$2', " ", '$3']. %[{expr, [{op, '$2'}, {left, '$1'}, {right, '$3'}]}]. +expr -> field_access : ['$1']. expr -> identifier '(' func_args ')' : ['$1', "(", '$3', ")"]. % [{func_call, '$1', '$3'}]. expr -> identifier : ['$1']. % [{name, '$1'}]. expr -> const : ['$1']. % [{const, '$1'}]. @@ -148,8 +155,10 @@ op -> 'OR' : ["OR"]. op -> 'NOT' : ["NOT"]. op -> 'IS' : ["IS"]. +field_access -> record '.' identifier : ['$1', ".", '$3']. + const -> string : ["'", unwrap('$1'), "'"]. -const -> int : erlang:integer_to_list(unwrap('$1')). +const -> integer : erlang:integer_to_list(unwrap('$1')). const -> float : erlang:float_to_list(unwrap('$1')). func_args -> '$empty' : []. 
diff --git a/components/electric/test/electric/ddlx/command_test.exs b/components/electric/test/electric/ddlx/command_test.exs index 86940ee5de..1c4b181ea8 100644 --- a/components/electric/test/electric/ddlx/command_test.exs +++ b/components/electric/test/electric/ddlx/command_test.exs @@ -36,8 +36,7 @@ defmodule Electric.DDLX.CommandTest do ddlx = "ELECTRIC ASSIGN (projects, memberships.role) TO memberships.user_id" assert [ - ~S[INSERT INTO "electric"."ddlx_commands" (ddlx) VALUES ('\x] <> hex, - ~S[CALL electric.assign(] <> args + ~S[INSERT INTO "electric"."ddlx_commands" (ddlx) VALUES ('\x] <> hex ] = pg_sql(ddlx) assert %SatPerms.DDLX{assigns: [assign]} = parse_pb(hex) @@ -48,27 +47,13 @@ defmodule Electric.DDLX.CommandTest do user_column: "user_id", role_column: "role" } = assign - - args = - String.split(args, "\n", trim: true) |> Enum.map(&String.trim/1) |> Enum.slice(0..-2//1) - - assert args == [ - "assignment_id => 'cfl4yau3uwjlscmzukhavbniggdrpenr',", - "assign_table_full_name => '\"public\".\"memberships\"',", - "scope => '\"public\".\"projects\"',", - "user_column_name => 'user_id',", - "role_name_string => NULL,", - "role_column_name => 'role',", - "if_fn => NULL" - ] end test "ELECTRIC UNASSIGN" do ddlx = "ELECTRIC UNASSIGN (projects, memberships.role) FROM memberships.user_id" assert [ - ~S[INSERT INTO "electric"."ddlx_commands" (ddlx) VALUES ('\x] <> hex, - ~S[CALL electric.unassign(] <> args + ~S[INSERT INTO "electric"."ddlx_commands" (ddlx) VALUES ('\x] <> hex ] = pg_sql(ddlx) assert %SatPerms.DDLX{unassigns: [unassign]} = parse_pb(hex) @@ -79,18 +64,6 @@ defmodule Electric.DDLX.CommandTest do user_column: "user_id", role_column: "role" } = unassign - - args = - String.split(args, "\n", trim: true) |> Enum.map(&String.trim/1) |> Enum.slice(0..-2//1) - - assert args == [ - "assignment_id => 'cfl4yau3uwjlscmzukhavbniggdrpenr',", - "assign_table_full_name => '\"public\".\"memberships\"',", - "scope => '\"public\".\"projects\"',", - 
"user_column_name => 'user_id',", - "role_name_string => NULL,", - "role_column_name => 'role'" - ] end test "ELECTRIC GRANT" do diff --git a/components/electric/test/electric/ddlx/ddlx_commands_test.exs b/components/electric/test/electric/ddlx/ddlx_commands_test.exs deleted file mode 100644 index 5c36cb5982..0000000000 --- a/components/electric/test/electric/ddlx/ddlx_commands_test.exs +++ /dev/null @@ -1,165 +0,0 @@ -defmodule Electric.DDLX.DDLXCommandsTest do - use Electric.Extension.Case, async: false - import ElectricTest.DDLXHelpers - - alias Electric.Satellite.SatPerms - alias ElectricTest.PermissionsHelpers.Proto - - @moduletag ddlx: true - - describe "creating rows in postgres from command structs" do - test_tx "assign creates an assignment", fn conn -> - # {:ok, conn} = init_helper_db() - # setup_ddlx(conn) - - projects_sql = """ - CREATE TABLE projects( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - project_id uuid NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id), - CONSTRAINT project_fk - FOREIGN KEY(project_id) - REFERENCES projects(id) - ); - """ - - query(conn, memberships_sql) - - assign = %SatPerms.Assign{ - table: Proto.table("public", "memberships"), - user_column: "user_id", - scope: Proto.table("public", "projects"), - role_name: nil, - role_column: "role", - if: "hello" - } - - query(conn, Electric.DDLX.command_to_postgres(assign)) - - assert_rows_slice( - conn, - "electric.assignments", - [ - [ - quote_table(assign.table), - quote_table(assign.scope), - "user_id", - "__none__", - "role", - "hello" - ] - ], - 1..6 - ) - end - - test_tx 
"unassign", fn conn -> - projects_sql = """ - CREATE TABLE public.projects( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships ( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - project_id uuid NOT NULL REFERENCES public.projects(id), - user_id uuid NOT NULL REFERENCES public.users (id) - ); - """ - - query(conn, memberships_sql) - - assign = %SatPerms.Assign{ - table: Proto.table("public", "memberships"), - user_column: "user_id", - scope: Proto.table("public", "projects"), - role_name: nil, - role_column: "role", - if: "hello" - } - - query(conn, Electric.DDLX.command_to_postgres(assign)) - - assert_rows_slice( - conn, - "electric.assignments", - [ - [ - quote_table(assign.table), - quote_table(assign.scope), - "user_id", - "__none__", - "role", - "hello" - ] - ], - 1..6 - ) - - unassign = %SatPerms.Unassign{ - table: Proto.table("public", "memberships"), - user_column: "user_id", - scope: Proto.table("public", "projects"), - role_name: nil, - role_column: "role" - } - - query(conn, Electric.DDLX.command_to_postgres(unassign)) - - assert_rows_slice( - conn, - "electric.assignments", - [], - 1..6 - ) - end - - # test_tx "disable", fn conn -> - # disable = %Disable{ - # table_name: "test" - # } - # - # {:ok, _, _result} = query(conn, Electric.DDLX.command_to_postgres(disable)) - # end - # - # test_tx "sqlite", fn conn -> - # sqlite = %SQLite{ - # sqlite_statement: "--hello" - # } - # - # {:ok, _, _result} = query(conn, Electric.DDLX.command_to_postgres(sqlite)) - # end - end -end diff --git a/components/electric/test/electric/ddlx/ddlx_postgres_test.exs b/components/electric/test/electric/ddlx/ddlx_postgres_test.exs deleted file mode 100644 index 
b6e6fe5536..0000000000 --- a/components/electric/test/electric/ddlx/ddlx_postgres_test.exs +++ /dev/null @@ -1,1159 +0,0 @@ -defmodule Electric.DDLX.DDLXPostgresTest do - use Electric.Extension.Case, async: false - import ElectricTest.DDLXHelpers - - @moduletag ddlx: true - - def assign_id do - :crypto.strong_rand_bytes(16) |> Base.encode32(case: :lower, padding: false) - end - - def list_tables(conn, schema \\ "public") do - {:ok, _cols, rows} = - query( - conn, - "select table_name from information_schema.tables WHERE table_schema = $1", - [schema] - ) - - for [col | _] <- rows, do: col - end - - def assert_tables(conn, table_names) do - existing = list_tables(conn) - assert MapSet.new(existing) == MapSet.new(table_names) - end - - def assert_table(conn, table_name, desired_columns) do - existing_columns = list_columns(conn, table_name) - - Enum.each(desired_columns, fn {column_name, assertions} -> - for {attribute_name, value} <- assertions do - # IO.inspect(existing_columns[column_name][attribute_name]) - # IO.inspect(value) - assert( - existing_columns[column_name][attribute_name] == value, - "Column assertion failed on #{table_name} #{column_name} #{attribute_name}, #{existing_columns[column_name][attribute_name]} != #{value}\n" - ) - end - end) - end - - def list_columns(conn, table_name) do - {:ok, columns, rows} = - query(conn, "select * from information_schema.columns WHERE table_name = $1", [table_name]) - - column_names = Enum.map(columns, &elem(&1, 1)) - column_name_index = Enum.find_index(column_names, &(&1 == "column_name")) - - for row <- rows, into: %{} do - column_name = Enum.at(row, column_name_index) - - attrs = - for {k, v} <- Enum.zip(column_names, row), into: %{} do - {k, v} - end - - {column_name, attrs} - end - end - - def get_foreign_keys(conn, table_name) do - query_str = """ - SELECT sch.nspname AS "from_schema", - tbl.relname AS "from_table", - ARRAY_AGG(col.attname ORDER BY u.attposition)::text[] AS "from_columns", - f_sch.nspname AS 
"to_schema", - f_tbl.relname AS "to_table", - ARRAY_AGG(f_col.attname ORDER BY f_u.attposition)::text[] AS "to_columns", - ARRAY_AGG((SELECT data_type FROM information_schema.columns WHERE table_name = $1 and column_name = col.attname) ORDER BY f_u.attposition)::text[] AS "to_types" - FROM pg_constraint c - LEFT JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE - LEFT JOIN LATERAL UNNEST(c.confkey) WITH ORDINALITY AS f_u(attnum, attposition) ON f_u.attposition = u.attposition - JOIN pg_class tbl ON tbl.oid = c.conrelid - JOIN pg_namespace sch ON sch.oid = tbl.relnamespace - LEFT JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum) - LEFT JOIN pg_class f_tbl ON f_tbl.oid = c.confrelid - LEFT JOIN pg_namespace f_sch ON f_sch.oid = f_tbl.relnamespace - LEFT JOIN pg_attribute f_col ON (f_col.attrelid = f_tbl.oid AND f_col.attnum = f_u.attnum) - WHERE c.contype = 'f' and tbl.relname = $2 - GROUP BY "from_schema", "from_table", "to_schema", "to_table" - ORDER BY "from_schema", "from_table"; - """ - - {:ok, _cols, rows} = query(conn, query_str, [table_name, table_name]) - - rows - end - - describe "testing creation of table and functions in postgres on init" do - test_tx "creates grants table", fn conn -> - grants_column_asserts = %{ - "privilege" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "on_table" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "role" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "column_name" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "scope" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "using_path" => %{ - "udt_name" => "text", - "is_nullable" => "YES" - }, - "check_fn" => %{ - "udt_name" => "text", - "is_nullable" => "YES" - } - } - - assert_table(conn, "grants", grants_column_asserts) - end - - test_tx "creates assignments table", fn conn -> - assignments_column_asserts = %{ - "id" => %{ - "udt_name" => 
"text", - "is_nullable" => "NO" - }, - "table_name" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "scope_table" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "user_column" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "role_name" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "role_column" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "if_fn" => %{ - "udt_name" => "text", - "is_nullable" => "YES" - } - } - - assert_table(conn, "assignments", assignments_column_asserts) - end - - test_tx "creates roles table", fn conn -> - roles_column_asserts = %{ - "id" => %{ - "udt_name" => "uuid", - "is_nullable" => "NO" - }, - "role" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "user_id" => %{ - "udt_name" => "varchar", - "is_nullable" => "NO" - }, - "scope_table" => %{ - "udt_name" => "varchar", - "is_nullable" => "YES" - }, - "scope_id" => %{ - "udt_name" => "varchar", - "is_nullable" => "YES" - } - } - - assert_table(conn, "roles", roles_column_asserts) - end - - test_tx "add ddlx functions", fn conn -> - func_sql = """ - SELECT - routine_name - FROM - information_schema.routines - WHERE - routine_type IN ('FUNCTION', 'PROCEDURE') - AND - routine_schema = 'electric'; - """ - - {:ok, _, rows} = query(conn, func_sql) - - expected_funcs = [ - "enable", - "disable", - "grant", - "revoke", - "assign", - "unassign", - "sqlite", - "find_fk_to_table", - "find_fk_for_column", - "find_pk" - ] - - installed_funcs = List.flatten(rows) - - for f <- expected_funcs do - assert f in installed_funcs - end - end - end - - def set_up_assignment(conn) do - projects_sql = """ - CREATE TABLE public.projects( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - 
memberships_sql = """ - CREATE TABLE public.memberships( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - project_id uuid NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id), - CONSTRAINT project_fk - FOREIGN KEY(project_id) - REFERENCES projects(id) - ); - """ - - query(conn, memberships_sql) - end - - def set_up_assignment_compound(conn) do - projects_sql = """ - CREATE TABLE public.projects( - id uuid DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL, - PRIMARY KEY (id, name) - ); - """ - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - project_id uuid NOT NULL, - project_name VARCHAR(64) NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id), - CONSTRAINT project_fk - FOREIGN KEY(project_id, project_name) - REFERENCES projects(id, name) - ); - """ - - query(conn, memberships_sql) - end - - def set_up_assignment_compound_membership(conn) do - projects_sql = """ - CREATE TABLE public.projects( - id uuid DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL, - PRIMARY KEY (id, name) - ); - """ - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - role VARCHAR(64) NOT NULL, - project_id uuid NOT NULL, - project_name VARCHAR(64) NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id), - CONSTRAINT project_fk - FOREIGN KEY(project_id, project_name) - REFERENCES projects(id, name), - PRIMARY KEY (user_id, project_id, 
project_name) - ); - """ - - query(conn, memberships_sql) - end - - describe "testing postgres functions" do - test_tx "adding a grant", fn conn -> - pg_sql = """ - CALL electric.grant('update', 'things', 'admin' , ARRAY['one', 'two'], 'project', 'project_id', 'function body') - """ - - query(conn, pg_sql) - - assert_rows( - conn, - "electric.grants", - [ - ["update", "things", "admin", "one", "project", "project_id", "function body"], - ["update", "things", "admin", "two", "project", "project_id", "function body"] - ] - ) - end - - test_tx "removing a grant", fn conn -> - pg_sql = """ - CALL electric.grant('update', '"public"."things"', 'admin' , ARRAY['one', 'two'], '"public"."project"', 'project_id', 'function body') - """ - - query(conn, pg_sql) - - assert_rows( - conn, - "electric.grants", - [ - [ - "update", - ~s["public"."things"], - "admin", - "one", - ~s["public"."project"], - "project_id", - "function body" - ], - [ - "update", - ~s["public"."things"], - "admin", - "two", - ~s["public"."project"], - "project_id", - "function body" - ] - ] - ) - - pg_sql2 = """ - CALL electric.revoke('update', '"public"."things"', 'admin' , ARRAY['one'], '"public"."project"') - """ - - query(conn, pg_sql2) - - assert_rows( - conn, - "electric.grants", - [ - [ - "update", - ~s["public"."things"], - "admin", - "two", - ~s["public"."project"], - "project_id", - "function body" - ] - ] - ) - end - - test_tx "assign creates an assignment", fn conn -> - set_up_assignment(conn) - - assign_id = assign_id() - - pg_sql = """ - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'hello'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - assert_rows_slice( - conn, - "electric.assignments", - [ - [ - assign_id, - ~s["public"."memberships"], - ~s["public"."projects"], - "user_id", - 
"__none__", - "role", - "hello" - ] - ], - 0..6 - ) - end - - test_tx "assign with scope compound key makes join table", fn conn -> - set_up_assignment_compound(conn) - - assign_id = assign_id() - - pg_sql = """ - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'hello'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - {:ok, _, rows} = query(conn, "select * from electric.assignments") - - row = List.first(rows) - - assert Enum.slice(row, 1..6) == [ - ~s["public"."memberships"], - ~s["public"."projects"], - "user_id", - "__none__", - "role", - "hello" - ] - - ## checking the join table that is created - assignment_id = List.first(row) - uuid_string = assignment_id |> String.replace("-", "_") - join_table_name = "assign_#{uuid_string}_join" - - tables = list_tables(conn, "electric") - - assert join_table_name in tables - columns = list_columns(conn, join_table_name) - - assert %{ - "assignment_id" => _, - "id" => _, - "project_id" => _, - "project_name" => _, - "memberships_id" => _, - "user_id" => _ - } = columns - - fks = get_foreign_keys(conn, join_table_name) - - assert Enum.sort([ - [ - "electric", - join_table_name, - ["assignment_id"], - "electric", - "assignments", - ["id"], - ["text"] - ], - [ - "electric", - join_table_name, - ["role_id"], - "electric", - "roles", - ["id"], - ["uuid"] - ], - [ - "electric", - join_table_name, - ["memberships_id"], - "public", - "memberships", - ["id"], - ["uuid"] - ], - [ - "electric", - join_table_name, - ["project_id", "project_name"], - "public", - "projects", - ["id", "name"], - ["uuid", "character varying"] - ], - [ - "electric", - join_table_name, - ["user_id"], - "public", - "users", - ["id"], - ["uuid"] - ] - ]) == Enum.sort(fks) - end - - test_tx "assign makes functions and triggers", fn conn -> - 
set_up_assignment_compound(conn) - - assign_id = assign_id() - - pg_sql = """ - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'hello'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - {:ok, _, rows} = query(conn, "select * from electric.assignments") - - row = List.first(rows) - - assignment_id = List.first(row) - uuid_string = assignment_id |> String.replace("-", "_") - - func_sql = """ - SELECT - routine_name - FROM - information_schema.routines - WHERE - routine_type = 'FUNCTION' - AND - routine_schema = 'electric'; - """ - - {:ok, _, rows} = query(conn, func_sql) - - assert ["upsert_role_#{uuid_string}"] in rows - assert ["cleanup_role_#{uuid_string}"] in rows - - triggers_sql = """ - SELECT - trigger_name - FROM - information_schema.triggers - WHERE - event_object_table = 'memberships'; - """ - - {:ok, _, rows} = query(conn, triggers_sql) - - assert ["electric_insert_role_#{uuid_string}"] in rows - assert ["electric_update_role_#{uuid_string}"] in rows - - triggers_sql = """ - SELECT - trigger_name - FROM - information_schema.triggers - WHERE - event_object_table = 'assign_#{uuid_string}_join'; - """ - - {:ok, _, rows} = query(conn, triggers_sql) - assert ["electric_cleanup_role_#{uuid_string}"] in rows - end - - test_tx "role assignment", fn conn -> - set_up_assignment_compound(conn) - - assign_id = assign_id() - - pg_sql = """ - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'TRUE'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - ## add a user, project and membership - - add_project_sql = """ - INSERT INTO projects ( name ) VALUES ( 'project_1' ) returning 
id; - """ - - {:ok, _query, rows} = query(conn, add_project_sql) - - project_id = List.first(List.first(rows)) - - add_user_sql = """ - INSERT INTO users ( name ) VALUES ( 'paul' ) returning id; - """ - - {:ok, _, rows} = query(conn, add_user_sql) - - person_id = List.first(List.first(rows)) - - add_membership_sql = """ - INSERT INTO memberships ( role, project_id, project_name, user_id ) VALUES ( 'admin', $1, 'project_1', $2); - """ - - {:ok, _, _rows} = - query(conn, add_membership_sql, [project_id, person_id]) - - assert_rows_slice( - conn, - "electric.roles", - [ - [ - "admin", - person_id, - ~s["public"."projects"], - "#{project_id}, project_1" - ] - ], - 1..4 - ) - end - - test_tx "role assignment with compound membership pk", fn conn -> - set_up_assignment_compound_membership(conn) - - assign_id = assign_id() - - pg_sql = """ - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'TRUE'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - ## add a user, project and membership - - add_project_sql = """ - INSERT INTO projects ( name ) VALUES ( 'project_1' ) returning id; - """ - - {:ok, _query, rows} = query(conn, add_project_sql) - - project_id = List.first(List.first(rows)) - - add_user_sql = """ - INSERT INTO users ( name ) VALUES ( 'paul' ) returning id; - """ - - {:ok, _, rows} = query(conn, add_user_sql) - - person_id = List.first(List.first(rows)) - - add_membership_sql = """ - INSERT INTO memberships ( role, project_id, project_name, user_id ) VALUES ( 'admin', $1, 'project_1', $2); - """ - - {:ok, _, _rows} = - query(conn, add_membership_sql, [project_id, person_id]) - - assert_rows_slice( - conn, - "electric.roles", - [ - [ - "admin", - person_id, - ~s["public"."projects"], - "#{project_id}, project_1" - ] - ], - 1..4 - ) - end - - test_tx "dupelicate 
assignment fails", fn conn -> - set_up_assignment_compound(conn) - - assign_id = assign_id() - - pg_sql = """ - - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'TRUE' - ); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - pg_sql = """ - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'TRUE' - ); - """ - - {:error, {:error, :error, _code, :unique_violation, _message, params}} = query(conn, pg_sql) - assert params[:constraint_name] == "unique_assign" - end - - test_tx "role update", fn conn -> - set_up_assignment_compound(conn) - - assign_id = assign_id() - - pg_sql = """ - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'TRUE'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - ## add a user, project and membership - - add_project_sql = """ - INSERT INTO projects ( name ) VALUES ( 'project_1' ) returning id; - """ - - {:ok, _query, rows} = query(conn, add_project_sql) - - project_id = List.first(List.first(rows)) - - add_user_sql = """ - INSERT INTO users ( name ) VALUES ( 'paul' ) returning id; - """ - - {:ok, _, rows} = query(conn, add_user_sql) - - person_id = List.first(List.first(rows)) - - add_membership_sql = """ - INSERT INTO memberships ( role, project_id, project_name, user_id ) VALUES ( 'admin', $1, 'project_1', $2) returning id; - """ - - {:ok, _, rows} = - query(conn, add_membership_sql, [project_id, person_id]) - - membership_id = List.first(List.first(rows)) - 
- assert_rows_slice( - conn, - "electric.roles", - [ - [ - "admin", - person_id, - ~s["public"."projects"], - "#{project_id}, project_1" - ] - ], - 1..4 - ) - - update_membership_sql = """ - UPDATE memberships SET role = 'member' WHERE id = $1; - """ - - {:ok, _, _rows} = query(conn, update_membership_sql, [membership_id]) - - assert_rows_slice( - conn, - "electric.roles", - [ - [ - "member", - person_id, - ~s["public"."projects"], - "#{project_id}, project_1" - ] - ], - 1..4 - ) - end - - test_tx "role removed by func", fn conn -> - set_up_assignment_compound(conn) - - assign_id = assign_id() - - pg_sql = """ - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => 'admin', - role_column_name => null, - if_fn => E'NEW.role = \\'admin\\''); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - ## add a user, project and membership - - add_project_sql = """ - INSERT INTO projects ( name ) VALUES ( 'project_1' ) returning id; - """ - - {:ok, _query, rows} = query(conn, add_project_sql) - - project_id = List.first(List.first(rows)) - - add_user_sql = """ - INSERT INTO users ( name ) VALUES ( 'paul' ) returning id; - """ - - {:ok, _, rows} = query(conn, add_user_sql) - - person_id = List.first(List.first(rows)) - - add_membership_sql = """ - INSERT INTO memberships ( role, project_id, project_name, user_id ) VALUES ( 'admin', $1, 'project_1', $2) returning id; - """ - - {:ok, _, rows} = - query(conn, add_membership_sql, [project_id, person_id]) - - membership_id = List.first(List.first(rows)) - - assert_rows_slice( - conn, - "electric.roles", - [ - [ - "admin", - person_id, - ~s["public"."projects"], - "#{project_id}, project_1" - ] - ], - 1..4 - ) - - update_membership_sql = """ - UPDATE memberships SET role = 'member' WHERE id = $1; - """ - - {:ok, _, _rows} = query(conn, update_membership_sql, [membership_id]) - - 
assert_rows_slice( - conn, - "electric.roles", - [], - 1..4 - ) - end - - test_tx "assign with no scope from string and update", fn conn -> - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id) - ); - """ - - query(conn, memberships_sql) - - assign_id = assign_id() - - pg_sql = """ - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => null, - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => null); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - assert_rows_slice( - conn, - "electric.assignments", - [[~s["public"."memberships"], "__none__", "user_id", "__none__", "role", nil]], - 1..6 - ) - - add_user_sql = """ - INSERT INTO users ( name ) VALUES ( 'paul' ) returning id; - """ - - {:ok, _cols, rows} = query(conn, add_user_sql) - - [[person_id | _] | _] = rows - - add_membership_sql = """ - INSERT INTO memberships ( role, user_id ) VALUES ( 'admin', $1) returning id; - """ - - {:ok, _, rows} = query(conn, add_membership_sql, [person_id]) - - [[membership_id | _] | _] = rows - - assert_rows_slice( - conn, - "electric.roles", - [["admin", person_id, nil, nil]], - 1..4 - ) - - update_membership_sql = """ - UPDATE memberships SET role = 'member' WHERE id = $1; - """ - - {:ok, _, _rows} = query(conn, update_membership_sql, [membership_id]) - - assert_rows_slice( - conn, - "electric.roles", - [["member", person_id, nil, nil]], - 1..4 - ) - end - - test_tx "assign fails with bad scope", fn conn -> - projects_sql = """ - CREATE TABLE public.projects( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT 
NULL); - """ - - query(conn, projects_sql) - - users_sql = """ - CREATE TABLE public.users( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - name VARCHAR(64) NOT NULL); - """ - - query(conn, users_sql) - - memberships_sql = """ - CREATE TABLE public.memberships( - id uuid PRIMARY KEY DEFAULT gen_random_uuid(), - role VARCHAR(64) NOT NULL, - user_id uuid NOT NULL, - CONSTRAINT user_fk - FOREIGN KEY(user_id) - REFERENCES users(id) - ); - """ - - query(conn, memberships_sql) - - assign_id = assign_id() - - pg_sql = """ - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => 'member', - role_column_name => null, - if_fn => null); - """ - - {:error, _error} = query(conn, pg_sql) - end - - test_tx "unassign cleans up", fn conn -> - set_up_assignment_compound(conn) - - assign_id = assign_id() - - pg_sql = """ - CALL electric.assign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role', - if_fn => 'hello'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - {:ok, _, rows} = query(conn, "select * from electric.assignments") - row = List.first(rows) - - assert Enum.slice(row, 1..6) == [ - ~s["public"."memberships"], - ~s["public"."projects"], - "user_id", - "__none__", - "role", - "hello" - ] - - ## checking the join table that is created - assignment_id = List.first(row) - uuid_string = assignment_id |> String.replace("-", "_") - - join_table_name = "assign_#{uuid_string}_join" - - tables = list_tables(conn, "electric") - - assert join_table_name in tables - - func_sql = """ - SELECT - routine_name - FROM - information_schema.routines - WHERE - routine_type = 'FUNCTION' - AND - routine_schema = 'electric'; - """ - - {:ok, _, rows} = query(conn, func_sql) - - assert 
["upsert_role_#{uuid_string}"] in rows - assert ["cleanup_role_#{uuid_string}"] in rows - - triggers_sql = """ - SELECT - trigger_name - FROM - information_schema.triggers - WHERE - event_object_table = 'memberships'; - """ - - {:ok, _, rows} = query(conn, triggers_sql) - - assert ["electric_insert_role_#{uuid_string}"] in rows - assert ["electric_update_role_#{uuid_string}"] in rows - - pg_sql = """ - CALL electric.unassign( - assignment_id => '#{assign_id}', - assign_table_full_name => '"public"."memberships"', - scope => '"public"."projects"', - user_column_name => 'user_id', - role_name_string => null, - role_column_name => 'role'); - """ - - {:ok, _, _rows} = query(conn, pg_sql) - - tables = list_tables(conn, "electric") - - assert join_table_name not in tables - - func_sql = """ - SELECT - routine_name - FROM - information_schema.routines - WHERE - routine_type = 'FUNCTION' - AND - routine_schema = 'electric'; - """ - - {:ok, _, rows} = query(conn, func_sql) - - assert ["upsert_role_#{uuid_string}"] not in rows - assert ["cleanup_role_#{uuid_string}"] not in rows - - triggers_sql = """ - SELECT - trigger_name - FROM - information_schema.triggers - WHERE - event_object_table = 'memberships'; - """ - - {:ok, _, rows} = query(conn, triggers_sql) - - assert ["electric_insert_role_#{uuid_string}"] not in rows - assert ["electric_update_role_#{uuid_string}"] not in rows - end - end -end diff --git a/components/electric/test/electric/ddlx/parser/tokenizer_test.exs b/components/electric/test/electric/ddlx/parser/tokenizer_test.exs index fce717285b..f6e853756c 100644 --- a/components/electric/test/electric/ddlx/parser/tokenizer_test.exs +++ b/components/electric/test/electric/ddlx/parser/tokenizer_test.exs @@ -181,6 +181,21 @@ defmodule Electric.DDLX.Parser.TokenizerTest do ] = tokens end + test "field accesses" do + tokens = + Tokenizer.tokens(~s[ROW.user_id = AUTH.user_id]) + + assert [ + {:unquoted_identifier, {1, 0, nil}, "ROW"}, + {:., {1, 3, nil}}, + 
{:unquoted_identifier, {1, 4, nil}, "user_id"}, + {:=, {1, 12, nil}}, + {:unquoted_identifier, {1, 14, nil}, "AUTH"}, + {:., {1, 18, nil}}, + {:unquoted_identifier, {1, 19, nil}, "user_id"} + ] = tokens + end + test "comments" do src = """ -- this is my first comment diff --git a/components/electric/test/electric/ddlx/parser_test.exs b/components/electric/test/electric/ddlx/parser_test.exs index ee65a44c83..1db95c8935 100644 --- a/components/electric/test/electric/ddlx/parser_test.exs +++ b/components/electric/test/electric/ddlx/parser_test.exs @@ -4,7 +4,6 @@ defmodule Electric.DDLX.ParserTest do alias Electric.DDLX.Parser alias Electric.DDLX.Command - alias Electric.DDLX.Command alias Electric.Satellite.SatPerms alias ElectricTest.PermissionsHelpers.Proto @@ -77,6 +76,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "2uidsvryaa2k6xjbmq6zlu7nfy2ytg6b", table: Proto.table("my_default", "admin_users"), user_column: "user_id", scope: nil, @@ -97,6 +97,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "2uidsvryaa2k6xjbmq6zlu7nfy2ytg6b", table: Proto.table("my_default", "admin_users"), user_column: "user_id", scope: nil, @@ -119,6 +120,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "pxrga7klxw65mybjn6vrta3vs5t2rkhe", table: Proto.table("my_default", "user_roles"), user_column: "user_id", scope: nil, @@ -139,6 +141,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "tcdchugjrxs6o52wlikvoaoqz5gef7du", table: Proto.table("application", "user_roles"), user_column: "user_id", scope: nil, @@ -160,6 +163,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "tcdchugjrxs6o52wlikvoaoqz5gef7du", table: Proto.table("application", "user_roles"), user_column: "user_id", scope: nil, @@ -181,6 +185,7 @@ defmodule Electric.DDLX.ParserTest 
do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "fq7ybdqfg6mee6cdnhx2ciyhzjxtwj3a", table: Proto.table("Application", "user_roles"), user_column: "user_id", scope: nil, @@ -202,6 +207,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "tcdchugjrxs6o52wlikvoaoqz5gef7du", table: Proto.table("application", "user_roles"), user_column: "user_id", scope: nil, @@ -224,6 +230,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "rzs4jo7bvzfmj2a5pjjblypdy2kw5vzc", table: Proto.table("my_default", "project_members"), user_column: "user_id", scope: Proto.table("my_default", "projects"), @@ -245,6 +252,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "fcld4tiw2qsr4yxlwmytijlj5tl4mklp", table: Proto.table("application", "project_members"), user_column: "user_id", scope: Proto.table("auth", "projects"), @@ -332,6 +340,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "2uidsvryaa2k6xjbmq6zlu7nfy2ytg6b", table: Proto.table("my_default", "admin_users"), user_column: "user_id", scope: nil, @@ -352,6 +361,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "2uidsvryaa2k6xjbmq6zlu7nfy2ytg6b", table: Proto.table("my_default", "admin_users"), user_column: "user_id", scope: nil, @@ -378,6 +388,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "pxrga7klxw65mybjn6vrta3vs5t2rkhe", table: Proto.table("my_default", "user_roles"), user_column: "user_id", scope: nil, @@ -398,6 +409,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "pxrga7klxw65mybjn6vrta3vs5t2rkhe", table: Proto.table("my_default", "user_roles"), user_column: "user_id", scope: nil, @@ -420,6 +432,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ 
%SatPerms.Assign{ + id: "rzs4jo7bvzfmj2a5pjjblypdy2kw5vzc", table: Proto.table("my_default", "project_members"), user_column: "user_id", scope: Proto.table("my_default", "projects"), @@ -442,6 +455,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "uk3vbzo7am3uxtlnjprioxwgv52aeojz", table: Proto.table("my_default", "deliveries"), user_column: "driver_id", scope: Proto.table("my_default", "deliveries"), @@ -462,6 +476,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "z5wssirjrctqy3zfbs25yfuuvou77gip", table: Proto.table("other", "deliveries"), user_column: "driver_id", scope: Proto.table("other", "deliveries"), @@ -489,6 +504,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ assigns: [ %SatPerms.Assign{ + id: "o7iyzse5guwyxjwr367hpfbmcg2irbyi", table: Proto.table("my_default", "user_permissions"), user_column: "user_id", scope: nil, @@ -568,6 +584,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ grants: [ %SatPerms.Grant{ + id: "6qwbckegxcxt2zesymulmwqotberhp4m", privilege: :UPDATE, table: Proto.table("thing", "Köln_en$ts"), role: Proto.role("house.admin"), @@ -594,6 +611,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ grants: [ %SatPerms.Grant{ + id: "6qwbckegxcxt2zesymulmwqotberhp4m", privilege: :UPDATE, table: Proto.table("thing", "Köln_en$ts"), role: Proto.role("house.admin"), @@ -618,6 +636,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ grants: [ %SatPerms.Grant{ + id: "6qwbckegxcxt2zesymulmwqotberhp4m", privilege: :UPDATE, table: Proto.table("thing", "Köln_en$ts"), role: Proto.role("house.admin"), @@ -644,6 +663,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ grants: [ %SatPerms.Grant{ + id: "unz3ra6f4w3luf2wdfhjsiryuyp4bdse", check: "name = 'Paul'", columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -669,6 +689,7 @@ defmodule Electric.DDLX.ParserTest do cmds: 
%SatPerms.DDLX{ grants: [ %SatPerms.Grant{ + id: "l5clz3xxefjb7pn2erskct2qvh3jjxzv", check: "name = 'Paul'", columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -694,6 +715,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ grants: [ %SatPerms.Grant{ + id: "2ag4ijgsjmrexpfbqzpyljuqnj4x4qry", check: "name = 'Paul'", columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -702,6 +724,7 @@ defmodule Electric.DDLX.ParserTest do scope: Proto.table("public", "projects") }, %SatPerms.Grant{ + id: "unz3ra6f4w3luf2wdfhjsiryuyp4bdse", check: "name = 'Paul'", columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -710,6 +733,7 @@ defmodule Electric.DDLX.ParserTest do scope: Proto.table("public", "projects") }, %SatPerms.Grant{ + id: "tr7tdsl5c7uv6pkcth5ybgtz6tddewnd", check: "name = 'Paul'", columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -733,6 +757,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ grants: [ %SatPerms.Grant{ + id: "q5m3kn7dzjptvnf7a4y456l6n4j3bmy3", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -742,6 +767,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, %SatPerms.Grant{ + id: "qcw7p6e4aj7nfev7vqwrgtuqx3kai3xd", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -751,6 +777,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, %SatPerms.Grant{ + id: "qw366w63mnmifcedq3aqr7wt4gfxhc2v", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -760,6 +787,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, %SatPerms.Grant{ + id: "qrvaeunuhz3tzvkvxfyidadr6w6a4zis", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -782,6 +810,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ grants: [ %SatPerms.Grant{ + id: "q5m3kn7dzjptvnf7a4y456l6n4j3bmy3", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -791,6 +820,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, 
%SatPerms.Grant{ + id: "qcw7p6e4aj7nfev7vqwrgtuqx3kai3xd", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -800,6 +830,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, %SatPerms.Grant{ + id: "qw366w63mnmifcedq3aqr7wt4gfxhc2v", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -809,6 +840,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, %SatPerms.Grant{ + id: "qrvaeunuhz3tzvkvxfyidadr6w6a4zis", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -833,6 +865,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ grants: [ %SatPerms.Grant{ + id: "q5m3kn7dzjptvnf7a4y456l6n4j3bmy3", check: nil, columns: %SatPerms.ColumnList{names: ["col1", "col2"]}, table: Proto.table("thing", "köln_en$ts"), @@ -842,6 +875,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, %SatPerms.Grant{ + id: "qcw7p6e4aj7nfev7vqwrgtuqx3kai3xd", check: nil, columns: %SatPerms.ColumnList{names: ["col1", "col2"]}, table: Proto.table("thing", "köln_en$ts"), @@ -851,6 +885,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, %SatPerms.Grant{ + id: "qw366w63mnmifcedq3aqr7wt4gfxhc2v", check: nil, columns: %SatPerms.ColumnList{names: ["col1", "col2"]}, table: Proto.table("thing", "köln_en$ts"), @@ -860,6 +895,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, %SatPerms.Grant{ + id: "qrvaeunuhz3tzvkvxfyidadr6w6a4zis", check: nil, columns: %SatPerms.ColumnList{names: ["col1", "col2"]}, table: Proto.table("thing", "köln_en$ts"), @@ -884,6 +920,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ grants: [ %SatPerms.Grant{ + id: "2akoxmzkfwchadl6qcf22f6syd2btygl", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -893,6 +930,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, %SatPerms.Grant{ + id: "jk7n6coz7jejdybyayxtwfni7jet43pv", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -902,6 +940,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, 
%SatPerms.Grant{ + id: "nv2253mnh3xo6ozaefj4kpfmbb5ervsz", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -911,6 +950,7 @@ defmodule Electric.DDLX.ParserTest do path: nil }, %SatPerms.Grant{ + id: "t3rp5vrt5r3tzzye33pcgwmxyovzgxb7", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -935,6 +975,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ grants: [ %SatPerms.Grant{ + id: "dfhpttndlmoswwso2idggsadq4vwuikg", check: nil, columns: nil, table: Proto.table("thing", "köln_en$ts"), @@ -950,6 +991,56 @@ defmodule Electric.DDLX.ParserTest do tag: "ELECTRIC GRANT" } end + + test "grant with field accesses in check clause" do + sql = + "ELECTRIC GRANT READ ON issues TO (projects, 'editor') WHERE (row.user_id = AUTH.user_id)" + + {:ok, result} = Parser.parse(sql) + + assert result == %Command{ + cmds: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "baa4uqpavntlksnbmmw7eqp24mela3ed", + check: "ROW.user_id = AUTH.user_id", + table: Proto.table("issues"), + privilege: :SELECT, + role: Proto.role("editor"), + scope: Proto.scope("projects") + } + ] + }, + stmt: sql, + tables: [{"public", "issues"}], + tag: "ELECTRIC GRANT" + } + end + + test "grant with multiple clauses in check clause" do + sql = + "ELECTRIC GRANT READ ON issues TO (projects, 'editor') WHERE ((row.user_id = AUTH.user_id) AND (thing.reason > 2))" + + {:ok, result} = Parser.parse(sql) + + assert result == %Command{ + cmds: %SatPerms.DDLX{ + grants: [ + %SatPerms.Grant{ + id: "baa4uqpavntlksnbmmw7eqp24mela3ed", + check: "(ROW.user_id = AUTH.user_id) AND (THING.reason > 2)", + table: Proto.table("issues"), + privilege: :SELECT, + role: Proto.role("editor"), + scope: Proto.scope("projects") + } + ] + }, + stmt: sql, + tables: [{"public", "issues"}], + tag: "ELECTRIC GRANT" + } + end end describe "ELECTRIC REVOKE" do @@ -961,6 +1052,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ revokes: [ %SatPerms.Revoke{ + id: 
"dajpwzccceliaxpwwru4rxc4f4qisw6j", privilege: :UPDATE, table: Proto.table("Thing", "Köln_en$ts"), role: Proto.role("house.admin"), @@ -981,24 +1073,28 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ revokes: [ %SatPerms.Revoke{ + id: "7nwvujrdfzxc6i2733x2bl3z72ea7htc", privilege: :SELECT, table: Proto.table("thing", "köln_en$ts"), role: Proto.role("house.admin"), scope: Proto.table("my_default", "projects") }, %SatPerms.Revoke{ + id: "tuanwoqchn5fvfkffu6bvjdhqkogo6nd", privilege: :INSERT, table: Proto.table("thing", "köln_en$ts"), role: Proto.role("house.admin"), scope: Proto.table("my_default", "projects") }, %SatPerms.Revoke{ + id: "5re2yqzv7oogtv7p7pyt7cmfxtnl3bpo", privilege: :UPDATE, table: Proto.table("thing", "köln_en$ts"), role: Proto.role("house.admin"), scope: Proto.table("my_default", "projects") }, %SatPerms.Revoke{ + id: "cpqo4as7pkf4coouxze6xfec2bd65hio", privilege: :DELETE, table: Proto.table("thing", "köln_en$ts"), role: Proto.role("house.admin"), @@ -1025,6 +1121,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ revokes: [ %SatPerms.Revoke{ + id: "5re2yqzv7oogtv7p7pyt7cmfxtnl3bpo", privilege: :UPDATE, table: Proto.table("thing", "köln_en$ts"), role: Proto.role("house.admin"), @@ -1046,6 +1143,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ revokes: [ %SatPerms.Revoke{ + id: "lwxqdr32qyfd6g7e3jfiioid4pxv7j2i", privilege: :UPDATE, table: Proto.table("thing", "köln_en$ts"), role: Proto.role("house.admin"), @@ -1067,18 +1165,21 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ revokes: [ %SatPerms.Revoke{ + id: "ew7qw5tu7zaqwuwv72cdppnqbbynzpoj", privilege: :INSERT, table: Proto.table("thing", "köln_en$ts"), role: Proto.role("house.admin"), scope: Proto.table("thing", "projects") }, %SatPerms.Revoke{ + id: "lwxqdr32qyfd6g7e3jfiioid4pxv7j2i", privilege: :UPDATE, table: Proto.table("thing", "köln_en$ts"), role: Proto.role("house.admin"), scope: Proto.table("thing", "projects") }, 
%SatPerms.Revoke{ + id: "7ocohdpexauh56fkfgsakqeldvposuvw", privilege: :DELETE, table: Proto.table("thing", "köln_en$ts"), role: Proto.role("house.admin"), @@ -1170,6 +1271,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ unassigns: [ %SatPerms.Unassign{ + id: "o7iyzse5guwyxjwr367hpfbmcg2irbyi", table: Proto.table("my_default", "user_permissions"), user_column: "user_id", scope: nil, @@ -1192,6 +1294,7 @@ defmodule Electric.DDLX.ParserTest do cmds: %SatPerms.DDLX{ unassigns: [ %SatPerms.Unassign{ + id: "nugp4djlkslpzpevh245r2kzurlf3k4p", table: Proto.table("other", "user_permissions"), user_column: "user_id", scope: Proto.table("other", "projects"), diff --git a/components/electric/test/electric/postgres/extension/schema_loader/epgsql_test.exs b/components/electric/test/electric/postgres/extension/schema_loader/epgsql_test.exs new file mode 100644 index 0000000000..5d98a59019 --- /dev/null +++ b/components/electric/test/electric/postgres/extension/schema_loader/epgsql_test.exs @@ -0,0 +1,269 @@ +defmodule Electric.Postgres.Extension.SchemaLoader.EpgsqlTest do + use Electric.Extension.Case, async: false + + alias Electric.DDLX.Command + alias Electric.Postgres.Extension + alias Electric.Postgres.Extension.SchemaLoader + alias Electric.Satellite.Permissions.Consumer + alias Electric.Satellite.SatPerms + alias ElectricTest.PermissionsHelpers.Proto + + def epgsql_loader(conn) do + {:ok, loader} = SchemaLoader.connect({SchemaLoader.Epgsql, []}, __connection__: conn) + loader + end + + def epgsql_loader_with_rules(conn) do + loader = epgsql_loader(conn) + + rules = + %SatPerms.Rules{ + id: 2, + parent_id: 1, + grants: [ + Proto.grant( + privilege: :INSERT, + table: Proto.table("issues"), + role: Proto.role("editor"), + scope: Proto.scope("projects") + ) + ], + assigns: [ + Proto.assign( + table: Proto.table("project_memberships"), + scope: Proto.scope("projects"), + user_column: "user_id", + role_column: "project_role" + ), + Proto.assign( + table: 
Proto.table("site_admins"), + user_column: "user_id", + role_column: "site_role" + ) + ] + } + + assert {:ok, _loader} = SchemaLoader.save_global_permissions(loader, rules) + + {loader, rules} + end + + test_tx "global_permissions/1", fn conn -> + loader = epgsql_loader(conn) + assert {:ok, %SatPerms.Rules{id: 1} = _rules} = SchemaLoader.global_permissions(loader) + end + + test_tx "global_permissions/2", fn conn -> + loader = epgsql_loader(conn) + assert {:ok, %SatPerms.Rules{id: 1} = _rules} = SchemaLoader.global_permissions(loader, 1) + end + + test_tx "save_global_permissions/2", fn conn -> + loader = epgsql_loader(conn) + + rules = + %SatPerms.Rules{ + id: 2, + parent_id: 1, + grants: [ + Proto.grant( + privilege: :INSERT, + table: Proto.table("issues"), + role: Proto.role("editor"), + scope: Proto.scope("projects") + ) + ], + assigns: [ + Proto.assign( + table: Proto.table("project_memberships"), + scope: Proto.scope("projects"), + user_column: "user_id", + role_column: "project_role" + ), + Proto.assign( + table: Proto.table("site_admins"), + user_column: "user_id", + role_column: "site_role" + ) + ] + } + + assert {:ok, _loader} = SchemaLoader.save_global_permissions(loader, rules) + assert {:ok, %SatPerms.Rules{id: 2} = ^rules} = SchemaLoader.global_permissions(loader) + end + + test_tx "user_permissions/2", fn conn -> + {loader, _rules} = epgsql_loader_with_rules(conn) + + assert {:ok, _loader, + %SatPerms{ + id: 1, + user_id: "e815dfe6-f64d-472a-a322-bfc9e7993d27", + roles: [], + rules: %SatPerms.Rules{id: 2} + }} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, _loader, + %SatPerms{ + id: 2, + user_id: "11f03d43-09e9-483b-9e8c-1f0e117f20fe", + roles: [], + rules: %SatPerms.Rules{id: 2} + }} = + SchemaLoader.user_permissions(loader, "11f03d43-09e9-483b-9e8c-1f0e117f20fe") + end + + test_tx "user_permissions/3", fn conn -> + {loader, _rules} = epgsql_loader_with_rules(conn) + + assert {:ok, _loader, 
%SatPerms{id: 1}} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, %SatPerms{id: 1}} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27", 1) + end + + test_tx "save_user_permissions/3", fn conn -> + {loader, _rules} = epgsql_loader_with_rules(conn) + + assert {:ok, _loader, %SatPerms{id: 1, rules: %{id: rules_id}}} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, _loader, %SatPerms{id: 2, roles: [_]}} = + SchemaLoader.save_user_permissions( + loader, + "e815dfe6-f64d-472a-a322-bfc9e7993d27", + %SatPerms.Roles{ + parent_id: 1, + rules_id: rules_id, + roles: [ + %SatPerms.Role{ + user_id: "e815dfe6-f64d-472a-a322-bfc9e7993d27", + role: "editor" + } + ] + } + ) + + assert {:ok, _loader, %SatPerms{id: 2, roles: [_]}} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + end + + test_tx "save_global_permissions/2 migrates existing user roles", fn conn -> + {loader, rules} = epgsql_loader_with_rules(conn) + + assert {:ok, _loader, + %SatPerms{ + id: 1, + user_id: "e815dfe6-f64d-472a-a322-bfc9e7993d27", + roles: [], + rules: %SatPerms.Rules{id: 2} + }} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, _loader, + %SatPerms{ + id: 2, + user_id: "11f03d43-09e9-483b-9e8c-1f0e117f20fe", + roles: [], + rules: %SatPerms.Rules{id: 2} + }} = + SchemaLoader.user_permissions(loader, "11f03d43-09e9-483b-9e8c-1f0e117f20fe") + + assert {:ok, _loader, %SatPerms{id: 3, roles: [_]}} = + SchemaLoader.save_user_permissions( + loader, + "e815dfe6-f64d-472a-a322-bfc9e7993d27", + %SatPerms.Roles{ + parent_id: 1, + rules_id: 2, + roles: [ + %SatPerms.Role{ + user_id: "e815dfe6-f64d-472a-a322-bfc9e7993d27", + role: "editor" + } + ] + } + ) + + ddlx = + Command.ddlx( + grants: [ + Proto.grant( + privilege: :INSERT, + table: Proto.table("comments"), + role: Proto.role("editor"), + scope: 
Proto.scope("projects") + ) + ] + ) + + rules = Consumer.apply_ddlx(rules, ddlx) + + assert {:ok, _loader} = SchemaLoader.save_global_permissions(loader, rules) + + assert {:ok, _loader, + %SatPerms{ + id: 5, + user_id: "e815dfe6-f64d-472a-a322-bfc9e7993d27", + rules: ^rules + }} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, _loader, + %SatPerms{ + id: 4, + user_id: "11f03d43-09e9-483b-9e8c-1f0e117f20fe", + rules: ^rules + }} = + SchemaLoader.user_permissions(loader, "11f03d43-09e9-483b-9e8c-1f0e117f20fe") + + ddlx = + Command.ddlx( + grants: [ + Proto.grant( + privilege: :DELETE, + table: Proto.table("comments"), + role: Proto.role("editor"), + scope: Proto.scope("projects") + ) + ] + ) + + rules = Consumer.apply_ddlx(rules, ddlx) + + assert {:ok, _loader} = SchemaLoader.save_global_permissions(loader, rules) + + assert {:ok, _loader, + %SatPerms{ + id: 7, + user_id: "e815dfe6-f64d-472a-a322-bfc9e7993d27", + rules: ^rules + }} = + SchemaLoader.user_permissions(loader, "e815dfe6-f64d-472a-a322-bfc9e7993d27") + + assert {:ok, _loader, + %SatPerms{ + id: 6, + user_id: "11f03d43-09e9-483b-9e8c-1f0e117f20fe", + rules: ^rules + }} = + SchemaLoader.user_permissions(loader, "11f03d43-09e9-483b-9e8c-1f0e117f20fe") + + {:ok, _, rows} = + :epgsql.equery( + conn, + "select count(id) as n from #{Extension.user_perms_table()} where global_perms_id = $1 group by (user_id)", + [rules.id] + ) + + # two users + assert length(rows) == 2 + + # there should only be one user permissions state for each user for each global rules state + for {n} <- rows do + assert n == 1 + end + end +end diff --git a/components/electric/test/electric/replication/postgres/migration_consumer_test.exs b/components/electric/test/electric/replication/postgres/migration_consumer_test.exs index f7b09139bf..dad3de4c8a 100644 --- a/components/electric/test/electric/replication/postgres/migration_consumer_test.exs +++ 
b/components/electric/test/electric/replication/postgres/migration_consumer_test.exs @@ -1,8 +1,8 @@ defmodule Electric.Replication.Postgres.MigrationConsumerTest do use ExUnit.Case, async: true + use Electric.Postgres.MockSchemaLoader - alias Electric.Postgres.MockSchemaLoader - + alias Electric.Replication.Changes alias Electric.Replication.Changes.NewRecord alias Electric.Replication.Changes.Transaction alias Electric.Replication.Postgres.MigrationConsumer @@ -78,7 +78,17 @@ defmodule Electric.Replication.Postgres.MigrationConsumerTest do {"public", "first_enum_table"} => 10001, {"electric", "shadow__public__first_enum_table"} => 20001, {"public", "second_enum_table"} => 10002, - {"electric", "shadow__public__second_enum_table"} => 20002 + {"electric", "shadow__public__second_enum_table"} => 20002, + {"public", "users"} => 30001, + {"electric", "shadow__public__users"} => 30011, + {"public", "projects"} => 30002, + {"electric", "shadow__public__projects"} => 30012, + {"public", "project_memberships"} => 30003, + {"electric", "shadow__public__project_memberships"} => 30013, + {"public", "teams"} => 30004, + {"electric", "shadow__public__teams"} => 30014, + {"public", "team_memberships"} => 30005, + {"electric", "shadow__public__team_memberships"} => 30015 } } @@ -86,8 +96,30 @@ defmodule Electric.Replication.Postgres.MigrationConsumerTest do {"public", "mistakes"} => ["id"] } + migrations = [ + {"20220000", + [ + """ + create table projects (id uuid primary key) + """, + """ + create table users (id uuid primary key) + """, + """ + create table project_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + project_id uuid not null references projects (id), + project_role text not null + ) + """ + ]} + ] + backend = - MockSchemaLoader.start_link([oids: oids, pks: pks], name: __MODULE__.Loader) + MockSchemaLoader.start_link([oids: oids, pks: pks, migrations: migrations], + name: __MODULE__.Loader + ) pid = start_link_supervised!( @@ 
-105,236 +137,485 @@ defmodule Electric.Replication.Postgres.MigrationConsumerTest do {:ok, origin: origin, producer: producer, version: version, loader: backend} end - test "refreshes subscription after receiving a migration", cxt do - %{producer: producer, origin: origin, version: version} = cxt - assert_receive {MockSchemaLoader, {:connect, _}} - - events = [ - %Transaction{ - changes: [ - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "6", - "query" => "create table something_else (id uuid primary key);", - "txid" => "101", - "txts" => "201" - }, - tags: [] - } - ] - } - ] - - GenStage.call(producer, {:emit, cxt.loader, events, version}) - - assert_receive {MockSchemaLoader, {:refresh_subscription, ^origin}}, 1500 - end + describe "migrations" do + test "refreshes subscription after receiving a migration", cxt do + %{producer: producer, origin: origin, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table something_else (id uuid primary key);", + "txid" => "101", + "txts" => "201" + }, + tags: [] + } + ] + } + ] + + GenStage.call(producer, {:emit, cxt.loader, events, version}) + + assert_receive {MockSchemaLoader, {:refresh_subscription, ^origin}}, 1500 + end - test "captures migration records", cxt do - %{origin: origin, producer: producer, version: version} = cxt - assert_receive {MockSchemaLoader, {:connect, _}} - - events = [ - %Transaction{ - changes: [ - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "6", - "query" => "create table something_else (id uuid primary key);", - "txid" => "100", - "txts" => "200" - }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "7", - "query" => "create table other_thing (id uuid primary key);", - "txid" => "100", - "txts" => "200" + test 
"captures migration records", cxt do + %{origin: origin, producer: producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table something_else (id uuid primary key);", + "txid" => "100", + "txts" => "200" + }, + tags: [] }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "8", - "query" => "create table yet_another_thing (id uuid primary key);", - "txid" => "100", - "txts" => "200" + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "7", + "query" => "create table other_thing (id uuid primary key);", + "txid" => "100", + "txts" => "200" + }, + tags: [] }, - tags: [] - } - ], - commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], - origin: origin, - publication: "mock_pub", - origin_type: :postgresql - } - ] - - GenStage.call(producer, {:emit, cxt.loader, events, version}) - - assert_receive {FakeConsumer, :events, ^events}, @receive_timeout - assert_receive {MockSchemaLoader, :load}, @receive_timeout - # only 1 save instruction is observed - assert_receive {MockSchemaLoader, {:save, ^version, schema, [_, _, _]}}, @receive_timeout - refute_receive {MockSchemaLoader, {:save, _, _schema}}, @refute_receive_timeout - - assert Enum.map(schema.tables, & &1.name.name) == [ - "something_else", - "other_thing", - "yet_another_thing", - "shadow__public__something_else", - "shadow__public__other_thing", - "shadow__public__yet_another_thing" - ] - end + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "8", + "query" => "create table yet_another_thing (id uuid primary key);", + "txid" => "100", + "txts" => "200" + }, + tags: [] + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, 
cxt.loader, events, version}) + + assert_receive {FakeConsumer, :events, ^events}, @receive_timeout + assert_receive {MockSchemaLoader, :load}, @receive_timeout + # only 1 save instruction is observed + assert_receive {MockSchemaLoader, {:save, ^version, schema, [_, _, _]}}, @receive_timeout + refute_receive {MockSchemaLoader, {:save, _, _schema}}, @refute_receive_timeout + + assert Enum.map(schema.tables, & &1.name.name) == [ + "projects", + "users", + "project_memberships", + "something_else", + "other_thing", + "yet_another_thing", + "shadow__public__projects", + "shadow__public__users", + "shadow__public__project_memberships", + "shadow__public__something_else", + "shadow__public__other_thing", + "shadow__public__yet_another_thing" + ] + end - test "captures unique enum types from migrations", cxt do - %{origin: origin, producer: producer, version: version} = cxt - assert_receive {MockSchemaLoader, {:connect, _}} - - events = [ - %Transaction{ - changes: [ - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "1", - "query" => "create type colour as enum ('red', 'green', 'blue');", - "txid" => "100", - "txts" => "200" - }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "2", - "query" => """ - create table first_enum_table ( - id uuid primary key, - foo colour - ); - """, - "txid" => "100", - "txts" => "200" + test "captures unique enum types from migrations", cxt do + %{origin: origin, producer: producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "1", + "query" => "create type colour as enum ('red', 'green', 'blue');", + "txid" => "100", + "txts" => "200" + }, + tags: [] }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "3", - "query" => "create type colour as enum ('red', 'green', 
'blue');", - "txid" => "100", - "txts" => "200" + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "2", + "query" => """ + create table first_enum_table ( + id uuid primary key, + foo colour + ); + """, + "txid" => "100", + "txts" => "200" + }, + tags: [] }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "4", - "query" => """ - create table second_enum_table ( - id uuid primary key, - bar colour - ); - """, - "txid" => "100", - "txts" => "200" + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "3", + "query" => "create type colour as enum ('red', 'green', 'blue');", + "txid" => "100", + "txts" => "200" + }, + tags: [] }, - tags: [] - } - ], - commit_timestamp: ~U[2024-02-06 10:08:00.000000Z], - origin: origin, - publication: "mock_pub", - origin_type: :postgresql - } - ] - - GenStage.call(producer, {:emit, cxt.loader, events, version}) - - # only 1 save instruction is observed - assert_receive {MockSchemaLoader, {:save, ^version, schema, [_, _, _, _]}}, @receive_timeout - refute_receive {MockSchemaLoader, {:save, _, _schema}}, @refute_receive_timeout + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "4", + "query" => """ + create table second_enum_table ( + id uuid primary key, + bar colour + ); + """, + "txid" => "100", + "txts" => "200" + }, + tags: [] + } + ], + commit_timestamp: ~U[2024-02-06 10:08:00.000000Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, events, version}) + + # only 1 save instruction is observed + assert_receive {MockSchemaLoader, {:save, ^version, schema, [_, _, _, _]}}, @receive_timeout + refute_receive {MockSchemaLoader, {:save, _, _schema}}, @refute_receive_timeout + + assert [ + %{ + name: %{name: "colour", schema: "public"}, + values: ["red", "green", "blue"] + } + ] = schema.enums + end - assert [ - %{ - name: %{name: 
"colour", schema: "public"}, - values: ["red", "green", "blue"] - } - ] = schema.enums + test "filters non-migration records", cxt do + %{origin: origin, producer: producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + raw_events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table something_else (id uuid primary key);", + "txid" => "101", + "txts" => "201" + }, + tags: [] + }, + %NewRecord{ + relation: {"electric", "schema"}, + record: %{ + "id" => "7", + "version" => version, + "schema" => "{}" + }, + tags: [] + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + filtered_events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table something_else (id uuid primary key);", + "txid" => "101", + "txts" => "201" + }, + tags: [] + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) + + assert_receive {FakeConsumer, :events, ^filtered_events}, 1000 + assert_receive {MockSchemaLoader, :load}, 500 + + assert_receive {MockSchemaLoader, + {:save, ^version, _schema, + ["create table something_else (id uuid primary key);"]}} + end end - test "filters non-migration records", cxt do - %{origin: origin, producer: producer, version: version} = cxt - assert_receive {MockSchemaLoader, {:connect, _}} - - raw_events = [ - %Transaction{ - changes: [ - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "6", - "query" => "create table something_else (id uuid primary key);", - "txid" => "101", - "txts" => "201" - }, - tags: [] - }, - %NewRecord{ - relation: {"electric", "schema"}, - record: %{ - "id" => 
"7", - "version" => version, - "schema" => "{}" - }, - tags: [] - } - ], - commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], - origin: origin, - publication: "mock_pub", - origin_type: :postgresql - } - ] + describe "permissions" do + alias ElectricTest.PermissionsHelpers.Proto + alias ElectricTest.PermissionsHelpers.Chgs + + test "converts ddlx events into global permission change messages", cxt do + %{origin: origin, producer: producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + raw_events = [ + %Transaction{ + changes: [ + Chgs.ddlx( + assigns: [ + Proto.assign( + table: Proto.table("project_memberships"), + user_column: "user_id", + role_column: "project_role", + scope: Proto.table("projects") + ) + ] + ) + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) + + assert_receive {FakeConsumer, :events, filtered_events}, 1000 + + assert [ + %Transaction{ + changes: [ + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{permissions_id: 2} + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + publication: "mock_pub", + origin_type: :postgresql + } + ] = filtered_events + end - filtered_events = [ - %Transaction{ - changes: [ - %NewRecord{ - relation: {"electric", "ddl_commands"}, - record: %{ - "id" => "6", - "query" => "create table something_else (id uuid primary key);", - "txid" => "101", - "txts" => "201" - }, - tags: [] + test "converts membership changes into user permission change messages", cxt do + %{origin: origin, producer: producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + raw_events = [ + %Transaction{ + changes: [ + Chgs.ddlx( + assigns: [ + Proto.assign( + table: Proto.table("project_memberships"), + user_column: "user_id", + role_column: "project_role", + 
scope: Proto.table("projects") + ) + ] + ) + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) + assert_receive {MockSchemaLoader, {:save_global_permissions, _}}, 500 + + assert_receive {FakeConsumer, :events, _filtered_events}, 1000 + + insert = + Chgs.insert( + {"public", "project_memberships"}, + %{ + "id" => "pm-1", + "user_id" => "user-1", + "project_id" => "p-1", + "project_role" => "admin" } - ], - commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], - origin: origin, - publication: "mock_pub", - origin_type: :postgresql - } - ] - - GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) - - assert_receive {FakeConsumer, :events, ^filtered_events}, 1000 - assert_receive {MockSchemaLoader, :load}, 500 + ) + + raw_events = [ + %Transaction{ + changes: [insert], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) + + assert_receive {FakeConsumer, :events, filtered_events}, 1000 + + assert [ + %Transaction{ + changes: [ + ^insert, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: "user-1", + permissions: _user_perms + } + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + publication: "mock_pub", + origin_type: :postgresql + } + ] = filtered_events + + assert_receive {MockSchemaLoader, {:save_user_permissions, "user-1", _}}, 500 + end - assert_receive {MockSchemaLoader, - {:save, ^version, _schema, - ["create table something_else (id uuid primary key);"]}} + test "uses updated schema information", cxt do + %{origin: origin, producer: producer, version: version} = cxt + assert_receive {MockSchemaLoader, {:connect, _}} + + insert = + Chgs.insert( + {"public", 
"team_memberships"}, + %{ + "id" => "tm-1", + "user_id" => "user-1", + "team_id" => "t-1", + "team_role" => "manager" + } + ) + + raw_events = [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table teams (id uuid primary key);", + "txid" => "101", + "txts" => "201" + }, + tags: [] + }, + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "7", + "query" => """ + create table team_memberships ( + id uuid primary key, + team_id uuid references teams (id), + user_id uuid references users (id), + team_role text not null + ); + """, + "txid" => "101", + "txts" => "201" + }, + tags: [] + }, + Chgs.ddlx( + assigns: [ + Proto.assign( + table: Proto.table("team_memberships"), + user_column: "user_id", + role_column: "team_role", + scope: Proto.table("teams") + ) + ] + ), + insert + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + origin: origin, + publication: "mock_pub", + origin_type: :postgresql + } + ] + + GenStage.call(producer, {:emit, cxt.loader, raw_events, version}) + + assert_receive {FakeConsumer, :events, filtered_events}, 1000 + + assert [ + %Transaction{ + changes: [ + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "6", + "query" => "create table teams (id uuid primary key);", + "txid" => "101", + "txts" => "201" + } + }, + %NewRecord{ + relation: {"electric", "ddl_commands"}, + record: %{ + "id" => "7", + "query" => """ + create table team_memberships ( + id uuid primary key, + team_id uuid references teams (id), + user_id uuid references users (id), + team_role text not null + ); + """, + "txid" => "101", + "txts" => "201" + } + }, + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{permissions_id: 2} + }, + ^insert, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: "user-1", + 
permissions: _user_perms + } + } + ], + commit_timestamp: ~U[2023-05-02 10:08:00.948788Z], + publication: "mock_pub", + origin_type: :postgresql + } + ] = filtered_events + end end end diff --git a/components/electric/test/electric/satellite/permissions/consumer_test.exs b/components/electric/test/electric/satellite/permissions/consumer_test.exs new file mode 100644 index 0000000000..f4e864ec37 --- /dev/null +++ b/components/electric/test/electric/satellite/permissions/consumer_test.exs @@ -0,0 +1,1334 @@ +defmodule Electric.Satellite.Permissions.ConsumerTest do + use ExUnit.Case, async: true + use Electric.Postgres.MockSchemaLoader + + alias Electric.DDLX + alias Electric.DDLX.Command + alias Electric.Replication.Changes + alias Electric.Satellite.Permissions.Consumer + alias Electric.Satellite.SatPerms + alias ElectricTest.PermissionsHelpers.{Chgs, Proto} + + def apply_ddlx(rules \\ %SatPerms.Rules{}, cmds) do + Consumer.apply_ddlx(rules, Command.ddlx(cmds)) + end + + def new(cmds) do + apply_ddlx(cmds) + end + + def parse_ddlx(ddlx) do + ddlx + |> Enum.map(&DDLX.Parser.parse/1) + |> Enum.map(&elem(&1, 1)) + |> Enum.map(fn %{cmds: %SatPerms.DDLX{} = cmd} -> cmd end) + end + + @scoped_assign_relation {"public", "project_memberships"} + @unscoped_assign_relation {"public", "site_admins"} + + describe "apply_ddlx/2" do + test "ASSIGN" do + assign = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + + assert %SatPerms.Rules{id: 1, parent_id: 0} = rules = apply_ddlx(assigns: [assign]) + + assert [^assign] = rules.assigns + end + + test "ASSIGN, UNASSIGN" do + rules = + new( + assigns: [ + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + ] + ) + + updated = + apply_ddlx( + rules, + unassigns: [ + Proto.unassign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + ] + ) + + assert 
updated.id == 2 + assert updated.assigns == [] + end + + test "ASSIGN ... IF, UNASSIGN" do + rules = + new( + assigns: [ + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin", + if: "something()" + ) + ] + ) + + updated = + apply_ddlx( + rules, + unassigns: [ + Proto.unassign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + ] + ) + + assert updated.id == 2 + assert updated.assigns == [] + end + + test "ASSIGN, ASSIGN, UNASSIGN" do + assign1 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin", + scope: Proto.scope("projects") + ) + + assign2 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_column: "role_name" + ) + + rules = new(assigns: [assign1, assign2]) + + rules = + apply_ddlx(rules, + unassigns: [ + Proto.unassign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin", + scope: Proto.scope("projects") + ) + ] + ) + + assert rules.id == 2 + assert [^assign2] = rules.assigns + end + + test "ASSIGN, re-ASSIGN" do + assign1 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin", + scope: Proto.scope("projects") + ) + + assign2 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin", + scope: Proto.scope("projects"), + if: "some_test()" + ) + + rules = new(assigns: [assign1]) + + rules = apply_ddlx(rules, assigns: [assign2]) + + assert rules.id == 2 + assert [^assign2] = rules.assigns + end + + test "GRANT" do + grant = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + + rules = apply_ddlx(grants: [grant]) + + assert rules.id == 1 + assert [^grant] = rules.grants + end + + test "GRANT, 
REVOKE" do + grant = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + + rules = new(grants: [grant]) + + updated = + apply_ddlx( + rules, + revokes: [ + Proto.revoke( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + ] + ) + + assert updated.grants == [] + end + + test "GRANT ... CHECK, REVOKE" do + grant = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects"), + check: "something()" + ) + + rules = new(grants: [grant]) + + updated = + apply_ddlx( + rules, + revokes: [ + Proto.revoke( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + ] + ) + + assert updated.grants == [] + end + + test "GRANT, GRANT, REVOKE" do + grant1 = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + + grant2 = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :UPDATE, + scope: Proto.scope("projects") + ) + + rules = new(grants: [grant1, grant2]) + + updated = + apply_ddlx( + rules, + revokes: [ + Proto.revoke( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + ] + ) + + assert updated.grants == [grant2] + end + + test "GRANT, re-GRANT" do + grant1 = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects") + ) + + grant2 = + Proto.grant( + table: Proto.table("issues"), + role: Proto.role("editor"), + privilege: :INSERT, + scope: Proto.scope("projects"), + check: "some_check()" + ) + + rules = new(grants: [grant1]) + + updated = apply_ddlx(rules, grants: [grant2]) + + assert updated.grants == [grant2] + end + + test 
"update with DDLX" do + ddlx = [ + ~S[ELECTRIC ASSIGN (projects, members.role_name) TO members.user_id], + ~S[ELECTRIC ASSIGN (projects, members.role_name) TO members.user_id IF (some_check_passes())], + ~S[ELECTRIC GRANT ALL ON issues TO (projects, 'editor')], + ~S[ELECTRIC GRANT READ ON issues TO (projects, 'editor') WHERE ((ROW.user_id = AUTH.user_id) AND (ROW.value > 3))], + ~S[ELECTRIC REVOKE DELETE ON issues FROM (projects, 'editor')] + ] + + rules = + ddlx + |> parse_ddlx() + |> Enum.reduce(%SatPerms.Rules{}, &Consumer.apply_ddlx(&2, &1)) + + assert rules == %SatPerms.Rules{ + id: 5, + parent_id: 4, + assigns: [ + Proto.assign( + scope: Proto.scope("projects"), + table: Proto.table("members"), + user_column: "user_id", + role_column: "role_name", + if: "some_check_passes()" + ) + ], + grants: [ + Proto.grant( + privilege: :UPDATE, + scope: Proto.scope("projects"), + table: Proto.table("issues"), + role: Proto.role("editor") + ), + Proto.grant( + privilege: :SELECT, + scope: Proto.scope("projects"), + table: Proto.table("issues"), + role: Proto.role("editor"), + check: "(ROW.user_id = AUTH.user_id) AND (ROW.value > 3)" + ), + Proto.grant( + privilege: :INSERT, + scope: Proto.scope("projects"), + table: Proto.table("issues"), + role: Proto.role("editor") + ) + ] + } + + ddlx = [ + ~S[ELECTRIC UNASSIGN (projects, members.role_name) FROM members.user_id], + ~S[ELECTRIC REVOKE UPDATE ON issues FROM (projects, 'editor')], + ~S[ELECTRIC REVOKE READ ON issues FROM (projects, 'editor')], + ~S[ELECTRIC REVOKE INSERT ON issues FROM (projects, 'editor')] + ] + + rules = + ddlx + |> parse_ddlx() + |> Enum.reduce(rules, &Consumer.apply_ddlx(&2, &1)) + + assert rules == %SatPerms.Rules{ + id: 9, + parent_id: 8, + assigns: [], + grants: [] + } + end + end + + def loader_with_global_perms(cxt) do + loader = loader(cxt) + + ddlx = + Command.ddlx( + grants: [ + Proto.grant( + privilege: :INSERT, + table: Proto.table("issues"), + role: Proto.role("editor"), + scope: 
Proto.scope("projects") + ) + ], + assigns: [ + Proto.assign( + table: Proto.table("project_memberships"), + scope: Proto.scope("projects"), + user_column: "user_id", + role_column: "project_role" + ), + Proto.assign( + table: Proto.table("site_admins"), + user_column: "user_id", + role_column: "site_role" + ) + ] + ) + + assert {:ok, 3, loader, rules} = Consumer.update_global(ddlx, loader) + + {loader, rules} + end + + def loader(_cxt) do + loader_spec = + MockSchemaLoader.backend_spec( + migrations: [ + {"01", + [ + """ + create table projects (id uuid primary key) + """, + """ + create table users (id uuid primary key) + """, + """ + create table teams (id uuid primary key) + """, + """ + create table project_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + project_id uuid not null references projects (id), + project_role text not null + ) + """, + """ + create table team_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + team_id uuid not null references teams (id), + team_role text not null + ) + """, + """ + create table site_admins ( + id uuid primary key, + user_id uuid not null references users (id), + site_role text not null + ) + """, + """ + create table my_default.admin_users ( + id uuid primary key, + user_id uuid not null references users (id) + ) + """ + ]} + ] + ) + + {:ok, loader} = SchemaLoader.connect(loader_spec, []) + loader + end + + describe "global rules serialisation" do + test "is initialised with empty state", cxt do + loader = loader(cxt) + + assert {:ok, %SatPerms.Rules{id: 1, assigns: [], grants: []}} = + SchemaLoader.global_permissions(loader) + end + + test "can update its state", cxt do + loader = loader(cxt) + assert {:ok, consumer} = Consumer.new(loader) + + assign1 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + + ddlx = Command.ddlx(assigns: [assign1]) + + tx = + Chgs.tx([ + 
Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.ddlx(ddlx) + ]) + + assert {:ok, tx, consumer, loader} = Consumer.update(tx, consumer, loader) + + assert tx.changes == [ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{ + permissions_id: 2 + } + } + ] + + assert {:ok, rules} = SchemaLoader.global_permissions(loader) + assert %SatPerms.Rules{id: 2, parent_id: 1, assigns: [^assign1]} = rules + + assign2 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin2" + ) + + ddlx = Command.ddlx(assigns: [assign2]) + + tx = + Chgs.tx([ + Chgs.ddlx(ddlx), + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}) + ]) + + assert {:ok, tx, _consumer, loader} = Consumer.update(tx, consumer, loader) + + assert tx.changes == [ + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{ + permissions_id: 3 + } + }, + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}) + ] + + assert {:ok, rules} = SchemaLoader.global_permissions(loader) + assert %SatPerms.Rules{id: 3, parent_id: 2, assigns: [^assign1, ^assign2]} = rules + end + + test "sequential updates are coalesced", cxt do + # we want to minimize permissions churn when possible + loader = loader(cxt) + assert {:ok, consumer} = Consumer.new(loader) + + assign1 = + Proto.assign( + table: Proto.table("my_default", "admin_users"), + user_column: "user_id", + role_name: "admin" + ) + + ddlx1 = Command.ddlx(assigns: [assign1]) + + assign2 = + Proto.assign( + table: Proto.table("project_memberships"), + user_column: "user_id", + scope: Proto.scope("projects"), + role_column: "role" + ) + + ddlx2 = Command.ddlx(assigns: [assign2]) + + assign3 = + Proto.assign( + table: Proto.table("team_memberships"), + user_column: "user_id", + scope: Proto.scope("teams"), + role_column: "role" + ) + + ddlx3 
= Command.ddlx(assigns: [assign3]) + + tx = + Chgs.tx([ + Chgs.ddlx(ddlx1), + Chgs.ddlx(ddlx2), + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.insert({"public", "kittens"}, %{"fur" => "furry"}), + Chgs.ddlx(ddlx3) + ]) + + assert {:ok, tx, _consumer, _loader} = Consumer.update(tx, consumer, loader) + + assert tx.changes == [ + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{ + permissions_id: 2 + } + }, + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.insert({"public", "kittens"}, %{"fur" => "furry"}), + %Changes.UpdatedPermissions{ + type: :global, + permissions: %Changes.UpdatedPermissions.GlobalPermissions{ + permissions_id: 3 + } + } + ] + end + end + + @user_id "7a81b0d0-97bf-466d-9053-4612146c2b67" + + describe "user roles state" do + test "starts with empty state", cxt do + {loader, rules} = loader_with_global_perms(cxt) + + assert {:ok, _loader, + %SatPerms{ + id: 1, + user_id: @user_id, + rules: ^rules, + roles: [] + } = perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert {:ok, _loader, ^perms} = + SchemaLoader.user_permissions(loader, @user_id) + end + + test "can load a specific version", cxt do + {loader, _rules} = loader_with_global_perms(cxt) + + assert {:ok, loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert {:ok, ^perms} = + SchemaLoader.user_permissions(loader, @user_id, perms.id) + + assert {:ok, _loader, other_perms} = + SchemaLoader.user_permissions(loader, "7c9fe38c-895b-48f5-9b31-bb6ca992bf2b") + + refute other_perms.id == perms.id + + # attempting to load another user's perms by id + assert {:error, _} = + SchemaLoader.user_permissions(loader, @user_id, other_perms.id) + end + + test "scoped user roles are added via an insert to roles table", cxt do + {loader, rules} = loader_with_global_perms(cxt) + {:ok, consumer} = Consumer.new(loader) + + %{assigns: [%{id: assign_id1}, %{id: assign_id2}]} = rules + + 
# table: Proto.table("project_memberships"), + # scope: Proto.scope("projects"), + # user_column: "user_id", + # role_name: "editor" + tx = + Chgs.tx([ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, tx, consumer, loader} = Consumer.update(tx, consumer, loader) + + assert {:ok, loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 2, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ), + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id2, + role: "editor", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["123"]} + } + ] + + tx = + Chgs.tx([ + Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + } + ) + ]) + + assert {:ok, tx, _consumer, loader} = Consumer.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + } + ), + %Changes.UpdatedPermissions{ + type: :user, + permissions: 
%Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["5c0fd272-3fc2-4ae8-8574-92823c814096"], + assign_id: assign_id1, + role: "site_admin", + user_id: @user_id, + scope: nil + }, + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id2, + role: "editor", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["123"]} + } + ] + end + + test "new assign rules are used on changes in tx", cxt do + {loader, _rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = Consumer.new(loader) + + assign = + Proto.assign( + table: Proto.table("team_memberships"), + scope: Proto.scope("teams"), + user_column: "user_id", + role_column: "team_role" + ) + + ddlx = Command.ddlx(assigns: [assign]) + + tx = + Chgs.tx([ + Chgs.ddlx(ddlx), + Chgs.insert( + {"public", "team_memberships"}, + %{ + "id" => "b72c24b5-20b5-4eea-ab12-ec38d6adcab7", + "team_role" => "team_owner", + "user_id" => @user_id, + "team_id" => "7dde618b-0cb2-44b5-8b12-b98c59338116" + } + ) + ]) + + assert {:ok, _tx, _consumer, loader} = Consumer.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert Enum.filter(perms.roles, &(&1.assign_id == assign.id)) == [ + %SatPerms.Role{ + row_id: ["b72c24b5-20b5-4eea-ab12-ec38d6adcab7"], + assign_id: assign.id, + role: "team_owner", + user_id: @user_id, + scope: %SatPerms.Scope{ + table: Proto.table("teams"), + id: ["7dde618b-0cb2-44b5-8b12-b98c59338116"] + } + } + ] + end + + test "user roles are updated via an update to roles table", cxt do + {loader, rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = Consumer.new(loader) + + %{assigns: [_, %{id: assign_id}]} = rules + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + 
"user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = Consumer.update(tx, consumer, loader) + + tx = + Chgs.tx([ + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + }, + %{ + "project_role" => "manager" + } + ) + ]) + + assert {:ok, tx, _consumer, loader} = Consumer.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + }, + %{ + "project_role" => "manager" + } + ), + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id, + role: "manager", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["123"]} + } + ] + end + + test "changes in role ownership are managed", cxt do + {loader, rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = Consumer.new(loader) + + %{assigns: [_, %{id: assign_id}]} = rules + + user_id2 = "0c7afad3-213a-4158-9e89-312fc5e682e1" + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = Consumer.update(tx, consumer, loader) + + tx = + Chgs.tx([ + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + 
"project_id" => "123" + }, + %{ + "user_id" => user_id2 + } + ) + ]) + + assert {:ok, tx, _consumer, loader} = Consumer.update(tx, consumer, loader) + + assert {:ok, loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert {:ok, _loader, perms2} = + SchemaLoader.user_permissions(loader, user_id2) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + assert %{id: 5, user_id: user_id2, rules: %{id: 2}} = perms2 + + assert tx.changes == [ + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + }, + %{ + "user_id" => user_id2 + } + ), + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + }, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: user_id2, + permissions: perms2 + } + } + ] + + assert perms.roles == [] + + assert perms2.roles == [ + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id, + role: "editor", + user_id: user_id2, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["123"]} + } + ] + end + + test "changes in role scope are managed", cxt do + {loader, rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = Consumer.new(loader) + + %{assigns: [_, %{id: assign_id}]} = rules + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = Consumer.update(tx, consumer, loader) + + update = + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + }, + %{ + "project_id" => "234" + } 
+ ) + + tx = Chgs.tx([update]) + + assert {:ok, tx, _consumer, loader} = Consumer.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + update, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id, + role: "editor", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["234"]} + } + ] + end + + test "user roles are deleted with deletes to roles table", cxt do + {loader, rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = Consumer.new(loader) + + %{assigns: [_, %{id: assign_id}]} = rules + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ), + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "5e41153f-eb42-4b97-8f42-85ca8f40fa1d", + "project_role" => "viewer", + "user_id" => @user_id, + "project_id" => "234" + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = Consumer.update(tx, consumer, loader) + + tx = + Chgs.tx([ + Chgs.delete( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, tx, _consumer, loader} = Consumer.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 4, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + Chgs.delete( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => 
@user_id, + "project_id" => "123" + } + ), + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["5e41153f-eb42-4b97-8f42-85ca8f40fa1d"], + assign_id: assign_id, + role: "viewer", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["234"]} + } + ] + end + + test "scoped roles are deleted when columns are nulled", cxt do + {loader, _rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = Consumer.new(loader) + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = Consumer.update(tx, consumer, loader) + + for column <- ~w(user_id project_id project_role) do + update = + Chgs.update( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + }, + %{column => nil} + ) + + tx = Chgs.tx([update]) + + assert {:ok, tx, _consumer, loader} = Consumer.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + update, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [] + end + end + + test "unscoped roles are deleted when columns are nulled", cxt do + {loader, _rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = Consumer.new(loader) + + tx = + Chgs.tx([ + Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + 
"user_id" => @user_id + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = Consumer.update(tx, consumer, loader) + + for column <- ~w(user_id site_role) do + update = + Chgs.update( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + }, + %{column => nil} + ) + + tx = Chgs.tx([update]) + + assert {:ok, tx, _consumer, loader} = Consumer.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 3, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [ + update, + %Changes.UpdatedPermissions{ + type: :user, + permissions: %Changes.UpdatedPermissions.UserPermissions{ + user_id: @user_id, + permissions: perms + } + } + ] + + assert perms.roles == [] + end + end + + test "updates with no changes do nothing", cxt do + {loader, _rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = Consumer.new(loader) + + tx = + Chgs.tx([ + Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = Consumer.update(tx, consumer, loader) + + update = + Chgs.update( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + }, + %{} + ) + + tx = Chgs.tx([update]) + + assert {:ok, tx, _consumer, loader} = Consumer.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 2, user_id: @user_id, rules: %{id: 2}} = perms + + assert tx.changes == [update] + end + + test "roles belonging to removed assigns are GC'd", cxt do + {loader, rules} = loader_with_global_perms(cxt) + assert {:ok, consumer} = Consumer.new(loader) + %{assigns: [%{id: _assign_id1}, %{id: assign_id2}]} = rules + + tx = + Chgs.tx([ + 
Chgs.insert( + @unscoped_assign_relation, + %{ + "id" => "5c0fd272-3fc2-4ae8-8574-92823c814096", + "site_role" => "site_admin", + "user_id" => @user_id + } + ) + ]) + + assert {:ok, _tx, consumer, loader} = Consumer.update(tx, consumer, loader) + + ddlx = + Command.ddlx( + unassigns: [ + Proto.unassign( + table: Proto.table("site_admins"), + user_column: "user_id", + role_column: "site_role" + ) + ] + ) + + tx = Chgs.tx([Chgs.ddlx(ddlx)]) + + assert {:ok, _tx, consumer, loader} = Consumer.update(tx, consumer, loader) + + tx = + Chgs.tx([ + Chgs.insert( + @scoped_assign_relation, + %{ + "id" => "db87f03f-89e1-48b4-a5c3-6cdbafb2837d", + "project_role" => "editor", + "user_id" => @user_id, + "project_id" => "123" + } + ) + ]) + + assert {:ok, _tx, _consumer, loader} = Consumer.update(tx, consumer, loader) + + assert {:ok, _loader, perms} = + SchemaLoader.user_permissions(loader, @user_id) + + assert %{id: 4, user_id: @user_id, rules: %{id: 3}} = perms + + assert perms.roles == [ + %SatPerms.Role{ + row_id: ["db87f03f-89e1-48b4-a5c3-6cdbafb2837d"], + assign_id: assign_id2, + role: "editor", + user_id: @user_id, + scope: %SatPerms.Scope{table: Proto.table("projects"), id: ["123"]} + } + ] + end + end + + test "sqlite ddlx messages are a no-op", cxt do + loader = loader(cxt) + assert {:ok, consumer} = Consumer.new(loader) + + ddlx = Command.ddlx(sqlite: [Proto.sqlite("create table local (id primary key)")]) + + tx = + Chgs.tx([ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}), + Chgs.ddlx(ddlx) + ]) + + assert {:ok, tx, _consumer, _loader} = Consumer.update(tx, consumer, loader) + + assert tx.changes == [ + Chgs.insert({"public", "kittens"}, %{"size" => "cute"}) + ] + end +end diff --git a/components/electric/test/electric/satellite/permissions/join_table_test.exs b/components/electric/test/electric/satellite/permissions/join_table_test.exs index 47644b8ea6..8346ec727e 100644 --- a/components/electric/test/electric/satellite/permissions/join_table_test.exs +++ 
b/components/electric/test/electric/satellite/permissions/join_table_test.exs @@ -8,6 +8,8 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do Tree } + alias Electric.Postgres.Extension.SchemaLoader + alias Electric.Postgres.MockSchemaLoader alias Electric.Satellite.Permissions alias Electric.Satellite.Permissions.Graph @@ -43,6 +45,28 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do describe "simple join table" do setup do + loader_spec = + MockSchemaLoader.backend_spec( + migrations: [ + {"01", + [ + "create table restaurants (id uuid primary key)", + "create table orders (id uuid primary key)", + "create table riders (id uuid primary key)", + """ + create table order_riders ( + id uuid primary key, + order_id uuid not null references orders (id), + rider_id uuid not null references riders (id) + ) + """ + ]} + ] + ) + + {:ok, loader} = SchemaLoader.connect(loader_spec, []) + {:ok, schema_version} = SchemaLoader.load(loader) + tree = Tree.new( [ @@ -60,7 +84,7 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do ) tree = add_order(tree, "rt1", "or1") - {:ok, tree: tree} + {:ok, tree: tree, loader: loader, schema_version: schema_version} end test "scope_id resolves across join tables", cxt do @@ -96,6 +120,50 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do describe "more complex schema" do setup do + loader_spec = + MockSchemaLoader.backend_spec( + migrations: [ + {"01", + [ + "create table restaurants (id uuid primary key)", + "create table customers (id uuid primary key)", + "create table riders (id uuid primary key)", + "create table addresses (id uuid primary key, customer_id uuid references customers (id))", + """ + create table orders ( + id uuid primary key, + restaurant_id uuid not null references restaurants (id), + customer_id uuid not null references customers (id), + address_id uuid not null references addresses (id) + ) + """, + """ + create table dishes ( + id uuid primary key, + restaurant_id uuid not null 
references restaurants (id) + ) + """, + """ + create table order_riders ( + id uuid primary key, + order_id uuid not null references orders (id), + rider_id uuid not null references riders (id) + ) + """, + """ + create table order_dishes ( + id uuid primary key, + order_id uuid not null references orders (id), + dish_id uuid not null references dishes (id) + ) + """ + ]} + ] + ) + + {:ok, loader} = SchemaLoader.connect(loader_spec, []) + {:ok, schema_version} = SchemaLoader.load(loader) + tree = Tree.new( [ @@ -161,7 +229,7 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do {:ok, _} = start_supervised(Perms.Transient) - {:ok, tree: tree} + {:ok, tree: tree, loader: loader, schema_version: schema_version} end test "scope_id/3", cxt do @@ -258,12 +326,14 @@ defmodule Electric.Satellite.Permissions.JoinTableTest do perms = perms_build( + cxt, [ ~s[GRANT READ ON #{table(@orders)} TO (#{table(@orders)}, 'rider')], - ~s[GRANT READ ON #{table(@addresses)} TO (#{table(@orders)}, 'rider')] + ~s[GRANT READ ON #{table(@addresses)} TO (#{table(@orders)}, 'rider')], + ~s[ASSIGN (#{table(@orders)}, 'rider') TO #{table(@order_riders)}.user_id] ], [ - Roles.role("rider", @orders, "c2-r2-o2") + Roles.role("rider", @orders, "c2-r2-o2", "assign-1") ] ) diff --git a/components/electric/test/electric/satellite/permissions/transient_test.exs b/components/electric/test/electric/satellite/permissions/transient_test.exs index 11379d160d..3bf1240472 100644 --- a/components/electric/test/electric/satellite/permissions/transient_test.exs +++ b/components/electric/test/electric/satellite/permissions/transient_test.exs @@ -116,12 +116,12 @@ defmodule Electric.Satellite.Permissions.TransientTest do role_grants = [ - Roles.role("editor", @projects, "p1", assign_id: "assign-01"), - Roles.role("editor", @projects, "p2", assign_id: "assign-01"), - Roles.role("editor", @projects, "p3", assign_id: "assign-01"), - Roles.role("reader", @projects, "p1", assign_id: "assign-02"), - 
Roles.role("reader", @projects, "p2", assign_id: "assign-02"), - Roles.role("reader", @projects, "p3", assign_id: "assign-02") + Roles.role("editor", @projects, "p1", "assign-01"), + Roles.role("editor", @projects, "p2", "assign-01"), + Roles.role("editor", @projects, "p3", "assign-01"), + Roles.role("reader", @projects, "p1", "assign-02"), + Roles.role("reader", @projects, "p2", "assign-02"), + Roles.role("reader", @projects, "p3", "assign-02") ] |> Enum.map(&%RoleGrant{role: Role.new(&1)}) diff --git a/components/electric/test/electric/satellite/permissions/trigger_test.exs b/components/electric/test/electric/satellite/permissions/trigger_test.exs index 61c08dbe3f..05c466bcc0 100644 --- a/components/electric/test/electric/satellite/permissions/trigger_test.exs +++ b/components/electric/test/electric/satellite/permissions/trigger_test.exs @@ -1,8 +1,9 @@ defmodule Electric.Satellite.Permissions.TriggerTest do use ExUnit.Case, async: true + use Electric.Postgres.MockSchemaLoader - alias Electric.Satellite.Permissions alias Electric.Satellite.Permissions.Trigger + alias Electric.Satellite.SatPerms alias ElectricTest.PermissionsHelpers.{ Auth, @@ -21,6 +22,58 @@ defmodule Electric.Satellite.Permissions.TriggerTest do @project_memberships {"public", "project_memberships"} setup do + loader_spec = + MockSchemaLoader.backend_spec( + migrations: [ + {"01", + [ + "create table users (id uuid primary key)", + "create table workspaces (id uuid primary key)", + "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", + "create table issues (id uuid primary key, project_id uuid not null references projects (id))", + "create table comments (id uuid primary key, issue_id uuid not null references issues (id))", + "create table reactions (id uuid primary key, comment_id uuid not null references comments (id))", + """ + create table project_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + project_id 
uuid not null references projects (id), + role text not null + ) + """ + # "create table regions (id uuid primary key)", + # "create table offices (id uuid primary key, region_id uuid not null references regions (id))", + # "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", + # "create table users (id uuid primary key)", + # "create table teams (id uuid primary key)", + # """ + # create table team_memberships ( + # id uuid primary key, + # user_id uuid not null references users (id), + # team_id uuid not null references teams (id), + # team_role text not null + # ) + # """, + # """ + # create table site_admins ( + # id uuid primary key, + # user_id uuid not null references users (id), + # site_role text not null + # ) + # """, + # """ + # create table my_default.admin_users ( + # id uuid primary key, + # user_id uuid not null references users (id) + # ) + # """ + ]} + ] + ) + + {:ok, loader} = SchemaLoader.connect(loader_spec, []) + {:ok, schema_version} = SchemaLoader.load(loader) + tree = Tree.new( [ @@ -42,7 +95,7 @@ defmodule Electric.Satellite.Permissions.TriggerTest do {:ok, _} = start_supervised(Perms.Transient) - {:ok, tree: tree} + {:ok, tree: tree, loader: loader, schema_version: schema_version} end def assign(ddlx) do @@ -51,6 +104,10 @@ defmodule Electric.Satellite.Permissions.TriggerTest do assign end + def callback(event, change, :loader) do + {event, change} + end + describe "for_assign/1" do test "generates a function that turns inserts into transient roles", cxt do assign = @@ -58,10 +115,13 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "assign (projects, #{table(@project_memberships)}.role) to #{table(@project_memberships)}.user_id" ) - assert [{@project_memberships, fun}] = Trigger.for_assign(assign) - assert is_function(fun, 3) + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) + + assert is_function(fun, 2) - %{user_id: user_id} = 
auth = Auth.user() + %{id: assign_id} = assign + user_id = Auth.user_id() change = Chgs.insert(@project_memberships, %{ @@ -71,27 +131,34 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "role" => "admin" }) - assert [{:insert, {@project_memberships, ["pm1"]}, role}] = fun.(change, cxt.tree, auth) + assert {{:insert, role}, ^change} = fun.(change, :loader) - assert %Permissions.Role{ - id: ["pm1"], + assert %SatPerms.Role{ + row_id: ["pm1"], role: "admin", + assign_id: ^assign_id, user_id: ^user_id, - scope: {@projects, ["p1"]} + scope: %SatPerms.Scope{ + table: %SatPerms.Table{schema: "public", name: "projects"}, + id: ["p1"] + } } = role - assert [] = fun.(change, cxt.tree, Auth.user("1191723b-37a5-46c8-818e-326cfbc2c0a7")) - assert [] = fun.(change, cxt.tree, Auth.nobody()) + # assert [] = fun.(change, cxt.tree, Auth.user("1191723b-37a5-46c8-818e-326cfbc2c0a7")) + # assert [] = fun.(change, cxt.tree, Auth.nobody()) end test "supports static role names", cxt do assign = assign("assign (projects, 'something') to #{table(@project_memberships)}.user_id") - assert [{@project_memberships, fun}] = Trigger.for_assign(assign) - assert is_function(fun, 3) + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) - %{user_id: user_id} = auth = Auth.user() + assert is_function(fun, 2) + + %{id: assign_id} = assign + user_id = Auth.user_id() change = Chgs.insert(@project_memberships, %{ @@ -100,13 +167,17 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "user_id" => user_id }) - assert [{:insert, {@project_memberships, ["pm1"]}, role}] = fun.(change, cxt.tree, auth) + assert {{:insert, role}, ^change} = fun.(change, :loader) - assert %Permissions.Role{ - id: ["pm1"], + assert %SatPerms.Role{ + row_id: ["pm1"], role: "something", + assign_id: ^assign_id, user_id: ^user_id, - scope: {@projects, ["p1"]} + scope: %SatPerms.Scope{ + table: %SatPerms.Table{schema: "public", name: "projects"}, + id: ["p1"] + } } = role 
end @@ -116,10 +187,13 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "assign #{table(@project_memberships)}.role to #{table(@project_memberships)}.user_id" ) - assert [{@project_memberships, fun}] = Trigger.for_assign(assign) - assert is_function(fun, 3) + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) + + assert is_function(fun, 2) - %{user_id: user_id} = auth = Auth.user() + %{id: assign_id} = assign + user_id = Auth.user_id() change = Chgs.insert(@project_memberships, %{ @@ -129,11 +203,12 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "role" => "admin" }) - assert [{:insert, {@project_memberships, ["pm1"]}, role}] = fun.(change, cxt.tree, auth) + assert {{:insert, role}, ^change} = fun.(change, :loader) - assert %Permissions.Role{ - id: ["pm1"], + assert %SatPerms.Role{ + row_id: ["pm1"], role: "admin", + assign_id: ^assign_id, user_id: ^user_id, scope: nil } = role @@ -143,10 +218,13 @@ defmodule Electric.Satellite.Permissions.TriggerTest do assign = assign("assign 'something' to #{table(@project_memberships)}.user_id") - assert [{@project_memberships, fun}] = Trigger.for_assign(assign) - assert is_function(fun, 3) + assert {@project_memberships, fun} = + Trigger.for_assign(assign, cxt.schema_version, &callback/3) + + assert is_function(fun, 2) - %{user_id: user_id} = auth = Auth.user() + %{id: assign_id} = assign + user_id = Auth.user_id() change = Chgs.insert(@project_memberships, %{ @@ -155,11 +233,12 @@ defmodule Electric.Satellite.Permissions.TriggerTest do "user_id" => user_id }) - assert [{:insert, {@project_memberships, ["pm1"]}, role}] = fun.(change, cxt.tree, auth) + assert {{:insert, role}, ^change} = fun.(change, :loader) - assert %Permissions.Role{ - id: ["pm1"], + assert %SatPerms.Role{ + row_id: ["pm1"], role: "something", + assign_id: ^assign_id, user_id: ^user_id, scope: nil } = role diff --git a/components/electric/test/electric/satellite/permissions_test.exs 
b/components/electric/test/electric/satellite/permissions_test.exs index 97d89dd06e..8df13e3bcc 100644 --- a/components/electric/test/electric/satellite/permissions_test.exs +++ b/components/electric/test/electric/satellite/permissions_test.exs @@ -10,11 +10,14 @@ defmodule Electric.Satellite.PermissionsTest do Tree } + alias Electric.Postgres.Extension.SchemaLoader + alias Electric.Postgres.MockSchemaLoader alias Electric.Satellite.{Permissions, Permissions.MoveOut} alias Electric.Replication.Changes import ElectricTest.PermissionsHelpers + @users {"public", "users"} @regions {"public", "regions"} @offices {"public", "offices"} @workspaces {"public", "workspaces"} @@ -23,8 +26,60 @@ defmodule Electric.Satellite.PermissionsTest do @comments {"public", "comments"} @reactions {"public", "reactions"} @project_memberships {"public", "project_memberships"} + @projects_assign ~s[ELECTRIC ASSIGN (#{table(@projects)}, #{table(@project_memberships)}.role) TO #{table(@project_memberships)}.user_id] + @global_assign ~s[ELECTRIC ASSIGN #{table(@users)}.role TO #{table(@users)}.id] setup do + loader_spec = + MockSchemaLoader.backend_spec( + migrations: [ + {"01", + [ + "create table regions (id uuid primary key)", + "create table offices (id uuid primary key, region_id uuid not null references regions (id))", + "create table workspaces (id uuid primary key)", + "create table projects (id uuid primary key, workspace_id uuid not null references workspaces (id))", + "create table issues (id uuid primary key, project_id uuid not null references projects (id))", + "create table comments (id uuid primary key, issue_id uuid not null references issues (id))", + "create table reactions (id uuid primary key, comment_id uuid not null references comments (id))", + "create table users (id uuid primary key, role text not null default 'normie')", + "create table teams (id uuid primary key)", + """ + create table project_memberships ( + id uuid primary key, + user_id uuid not null references 
users (id), + project_id uuid not null references projects (id), + project_role text not null + ) + """, + """ + create table team_memberships ( + id uuid primary key, + user_id uuid not null references users (id), + team_id uuid not null references teams (id), + team_role text not null + ) + """, + """ + create table site_admins ( + id uuid primary key, + user_id uuid not null references users (id), + site_role text not null + ) + """, + """ + create table my_default.admin_users ( + id uuid primary key, + user_id uuid not null references users (id) + ) + """ + ]} + ] + ) + + {:ok, loader} = SchemaLoader.connect(loader_spec, []) + {:ok, schema_version} = SchemaLoader.load(loader) + tree = Tree.new( [ @@ -69,16 +124,20 @@ defmodule Electric.Satellite.PermissionsTest do {:ok, _} = start_supervised(Perms.Transient) - {:ok, tree: tree} + {:ok, tree: tree, loader: loader, schema_version: schema_version} end describe "validate_write/3" do test "scoped role, scoped grant", cxt do perms = perms_build( - ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], + cxt, + [ + ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], + @projects_assign + ], [ - Roles.role("editor", @projects, "p2") + Roles.role("editor", @projects, "p2", "assign-1") ] ) @@ -118,9 +177,13 @@ defmodule Electric.Satellite.PermissionsTest do test "unscoped role, scoped grant", cxt do perms = perms_build( - ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], + cxt, + [ + ~s[GRANT ALL ON #{table(@comments)} TO (projects, 'editor')], + @global_assign + ], [ - Roles.role("editor") + Roles.role("editor", "assign-1") ] ) @@ -138,10 +201,14 @@ defmodule Electric.Satellite.PermissionsTest do test "scoped role, unscoped grant", cxt do perms = perms_build( - ~s[GRANT ALL ON #{table(@comments)} TO 'editor'], + cxt, + [ + ~s[GRANT ALL ON #{table(@comments)} TO 'editor'], + @projects_assign + ], [ # we have an editor role within project p2 - Roles.role("editor", @projects, "p2") + 
Roles.role("editor", @projects, "p2", "assign-1") ] ) @@ -169,12 +236,14 @@ defmodule Electric.Satellite.PermissionsTest do test "grant for different table", cxt do perms = perms_build( + cxt, [ ~s[GRANT SELECT ON #{table(@comments)} TO 'editor'], - ~s[GRANT ALL ON #{table(@reactions)} TO 'editor'] + ~s[GRANT ALL ON #{table(@reactions)} TO 'editor'], + @global_assign ], [ - Roles.role("editor") + Roles.role("editor", "assign-1") ] ) @@ -200,9 +269,13 @@ defmodule Electric.Satellite.PermissionsTest do test "unscoped role, unscoped grant", cxt do perms = perms_build( - ~s[GRANT UPDATE ON #{table(@comments)} TO 'editor'], + cxt, + [ + ~s[GRANT UPDATE ON #{table(@comments)} TO 'editor'], + @global_assign + ], [ - Roles.role("editor") + Roles.role("editor", "assign-1") ] ) @@ -234,13 +307,16 @@ defmodule Electric.Satellite.PermissionsTest do test "scoped role, change outside of scope", cxt do perms = perms_build( + cxt, [ ~s[GRANT UPDATE ON #{table(@comments)} TO 'editor'], - ~s[GRANT ALL ON #{table(@regions)} TO 'admin'] + ~s[GRANT ALL ON #{table(@regions)} TO 'admin'], + @projects_assign, + @global_assign ], [ - Roles.role("editor", @projects, "p2"), - Roles.role("admin") + Roles.role("editor", @projects, "p2", "assign-1"), + Roles.role("admin", "assign-2") ] ) @@ -256,6 +332,30 @@ defmodule Electric.Satellite.PermissionsTest do ) end + test "role with no matching assign", cxt do + perms = + perms_build( + cxt, + [ + ~s[GRANT UPDATE ON #{table(@comments)} TO (#{table(@projects)}, 'editor')] + ], + [ + Roles.role("editor", @projects, "p1", "non-existant") + ] + ) + + assert {:error, _} = + Permissions.validate_write( + perms, + cxt.tree, + Chgs.tx([ + Chgs.update(@comments, %{"id" => "c1", "comment" => "old comment"}, %{ + "comment" => "new comment" + }) + ]) + ) + end + test "overlapping global and scoped perms", cxt do # Test that even though the global perm doesn't grant # the required permissions, the scoped perms are checked @@ -264,13 +364,16 @@ defmodule 
Electric.Satellite.PermissionsTest do # until we run out of get permission. perms = perms_build( + cxt, [ ~s[GRANT UPDATE (description) ON #{table(@issues)} TO (projects, 'editor')], - ~s[GRANT UPDATE (title) ON #{table(@issues)} TO 'editor'] + ~s[GRANT UPDATE (title) ON #{table(@issues)} TO 'editor'], + @projects_assign, + @global_assign ], [ - Roles.role("editor", @projects, "p1"), - Roles.role("editor") + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("editor", "assign-2") ] ) @@ -289,6 +392,7 @@ defmodule Electric.Satellite.PermissionsTest do test "AUTHENTICATED w/user_id", cxt do perms = perms_build( + cxt, ~s[GRANT ALL ON #{table(@comments)} TO AUTHENTICATED], [] ) @@ -306,6 +410,7 @@ defmodule Electric.Satellite.PermissionsTest do test "AUTHENTICATED w/o permission", cxt do perms = perms_build( + cxt, ~s[GRANT SELECT ON #{table(@comments)} TO AUTHENTICATED], [] ) @@ -323,6 +428,7 @@ defmodule Electric.Satellite.PermissionsTest do test "AUTHENTICATED w/o user_id", cxt do perms = perms_build( + cxt, ~s[GRANT ALL ON #{table(@comments)} TO AUTHENTICATED], [], auth: Auth.nobody() @@ -341,6 +447,7 @@ defmodule Electric.Satellite.PermissionsTest do test "ANYONE w/o user_id", cxt do perms = perms_build( + cxt, ~s[GRANT ALL ON #{table(@comments)} TO ANYONE], [], auth: Auth.nobody() @@ -359,12 +466,14 @@ defmodule Electric.Satellite.PermissionsTest do test "protected columns", cxt do perms = perms_build( + cxt, [ ~s[GRANT INSERT (id, text) ON #{table(@comments)} TO 'editor'], - ~s[GRANT UPDATE (text) ON #{table(@comments)} TO 'editor'] + ~s[GRANT UPDATE (text) ON #{table(@comments)} TO 'editor'], + @global_assign ], [ - Roles.role("editor") + Roles.role("editor", "assign-1") ] ) @@ -415,16 +524,18 @@ defmodule Electric.Satellite.PermissionsTest do test "moves between auth scopes", cxt do perms = perms_build( + cxt, [ ~s[GRANT UPDATE ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT SELECT ON #{table(@issues)} TO 'reader'] + ~s[GRANT 
SELECT ON #{table(@issues)} TO 'reader'], + @projects_assign ], [ # update rights on p1 & p3 - Roles.role("editor", @projects, "p1"), - Roles.role("editor", @projects, "p3"), + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("editor", @projects, "p3", "assign-1"), # read-only role on project p2 - Roles.role("reader", @projects, "p2") + Roles.role("reader", @projects, "p2", "assign-1") ] ) @@ -455,13 +566,15 @@ defmodule Electric.Satellite.PermissionsTest do test "write in scope tree", cxt do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT ALL ON #{table(@reactions)} TO (#{table(@projects)}, 'editor')] + ~s[GRANT ALL ON #{table(@reactions)} TO (#{table(@projects)}, 'editor')], + @projects_assign ], [ - Roles.role("editor", @projects, "p1") + Roles.role("editor", @projects, "p1", "assign-1") ] ) @@ -495,9 +608,10 @@ defmodule Electric.Satellite.PermissionsTest do describe "intermediate roles" do # roles that are created on the client and then used within the same tx before triggers have # run on pg - setup(_cxt) do + setup(cxt) do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'manager')], ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'manager')], @@ -508,12 +622,13 @@ defmodule Electric.Satellite.PermissionsTest do ~s[GRANT ALL ON #{table(@projects)} TO 'project_admin'], ~s[GRANT ALL ON #{table(@project_memberships)} TO 'project_admin'], # the assign rule for the 'manager' role - ~s[ASSIGN (#{table(@projects)}, #{table(@project_memberships)}.role) TO #{table(@project_memberships)}.user_id] + @projects_assign, + @global_assign ], [ # start with the ability to create projects and memberships - Roles.role("project_admin"), - Roles.role("manager", @projects, "p1", assign_id: "assign-1") + Roles.role("manager", @projects, "p1", "assign-1", row_id: ["pm1"]), + 
Roles.role("project_admin", "assign-2") ] ) @@ -659,7 +774,9 @@ defmodule Electric.Satellite.PermissionsTest do Chgs.tx([ Chgs.delete(@project_memberships, %{ "id" => "pm100", - "project_id" => "p100" + "project_id" => "p100", + "user_id" => Auth.user_id(), + "role" => "manager" }), Chgs.insert(@issues, %{"id" => "i101", "project_id" => "p100"}) ]) @@ -762,15 +879,17 @@ defmodule Electric.Satellite.PermissionsTest do setup(cxt) do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT SELECT ON #{table(@issues)} TO (#{table(@projects)}, 'reader')] + ~s[GRANT SELECT ON #{table(@issues)} TO (#{table(@projects)}, 'reader')], + @projects_assign ], [ - Roles.role("editor", @projects, "p1", assign_id: "assign-01"), + Roles.role("editor", @projects, "p1", "assign-1"), # read-only role on project p2 - Roles.role("reader", @projects, "p2", assign_id: "assign-01"), - Roles.role("editor", @projects, "p3", assign_id: "assign-01") + Roles.role("reader", @projects, "p2", "assign-1"), + Roles.role("editor", @projects, "p3", "assign-1") ] ) @@ -792,7 +911,7 @@ defmodule Electric.Satellite.PermissionsTest do assert {:ok, _perms} = cxt.perms |> Perms.add_transient( - assign_id: "assign-01", + assign_id: "assign-1", target_relation: @issues, target_id: ["i3"], scope_id: ["p1"], @@ -814,7 +933,7 @@ defmodule Electric.Satellite.PermissionsTest do assert {:error, _} = cxt.perms |> Perms.add_transient( - assign_id: "assign-01", + assign_id: "assign-1", target_relation: @issues, target_id: ["i4"], scope_id: ["p1"], @@ -836,7 +955,7 @@ defmodule Electric.Satellite.PermissionsTest do assert {:error, _} = cxt.perms |> Perms.add_transient( - assign_id: "assign-01", + assign_id: "assign-1", target_relation: @issues, target_id: ["i3"], scope_id: ["p1"], @@ -857,17 +976,20 @@ defmodule Electric.Satellite.PermissionsTest do test "removes changes we don't have permissions to see", cxt do perms = perms_build( + cxt, [ ~s[GRANT ALL ON 
#{table(@issues)} TO (#{table(@projects)}, 'editor')], ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], ~s[GRANT READ ON #{table(@issues)} TO (#{table(@projects)}, 'reader')], ~s[GRANT READ ON #{table(@comments)} TO (#{table(@projects)}, 'reader')], - ~s[GRANT ALL ON #{table(@workspaces)} TO 'global_admin'] + ~s[GRANT ALL ON #{table(@workspaces)} TO 'global_admin'], + @projects_assign, + @global_assign ], [ - Roles.role("editor", @projects, "p1"), - Roles.role("reader", @projects, "p2"), - Roles.role("global_admin") + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("reader", @projects, "p2", "assign-1"), + Roles.role("global_admin", "assign-2") ] ) @@ -899,11 +1021,13 @@ defmodule Electric.Satellite.PermissionsTest do test "ignores column limits in grants", cxt do perms = perms_build( + cxt, [ - ~s[GRANT READ (id, title) ON #{table(@issues)} TO 'editor'] + ~s[GRANT READ (id, title) ON #{table(@issues)} TO 'editor'], + @global_assign ], [ - Roles.role("editor") + Roles.role("editor", "assign-1") ] ) @@ -922,13 +1046,15 @@ defmodule Electric.Satellite.PermissionsTest do test "incorporates in-tx additions to scope", cxt do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT ALL ON #{table(@reactions)} TO (#{table(@projects)}, 'editor')] + ~s[GRANT ALL ON #{table(@reactions)} TO (#{table(@projects)}, 'editor')], + @projects_assign ], [ - Roles.role("editor", @projects, "p1") + Roles.role("editor", @projects, "p1", "assign-1") ] ) @@ -950,13 +1076,15 @@ defmodule Electric.Satellite.PermissionsTest do test "incorporates in-tx removals from scope", cxt do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], - ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')] + ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], + 
@projects_assign ], [ - Roles.role("editor", @projects, "p1"), - Roles.role("editor", @projects, "p2") + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("editor", @projects, "p2", "assign-1") ] ) @@ -1037,16 +1165,19 @@ defmodule Electric.Satellite.PermissionsTest do test "removal from a scope but with global permissions", cxt do perms = perms_build( + cxt, [ ~s[GRANT ALL ON #{table(@issues)} TO (#{table(@projects)}, 'editor')], ~s[GRANT ALL ON #{table(@comments)} TO (#{table(@projects)}, 'editor')], ~s[GRANT ALL ON #{table(@issues)} TO 'admin'], - ~s[GRANT ALL ON #{table(@comments)} TO 'admin'] + ~s[GRANT ALL ON #{table(@comments)} TO 'admin'], + @projects_assign, + @global_assign ], [ - Roles.role("editor", @projects, "p1"), - Roles.role("editor", @projects, "p2"), - Roles.role("admin") + Roles.role("editor", @projects, "p1", "assign-1"), + Roles.role("editor", @projects, "p2", "assign-1"), + Roles.role("admin", "assign-2") ] ) diff --git a/components/electric/test/support/mock_schema_loader.ex b/components/electric/test/support/mock_schema_loader.ex index 3905c1b887..0f8478ac5f 100644 --- a/components/electric/test/support/mock_schema_loader.ex +++ b/components/electric/test/support/mock_schema_loader.ex @@ -5,6 +5,17 @@ defmodule Electric.Postgres.MockSchemaLoader do Schema } + alias Electric.Satellite.SatPerms + + defmacro __using__(_opts) do + quote do + alias Electric.Postgres.MockSchemaLoader + alias Electric.Postgres.Extension.SchemaLoader + end + end + + defstruct versions: [], opts: [], global_perms: [], user_perms: [] + def oid_loader(type, schema, name) do {:ok, Enum.join(["#{type}", schema, name], ".") |> :erlang.phash2(50_000)} end @@ -44,7 +55,7 @@ defmodule Electric.Postgres.MockSchemaLoader do def start_link(opts, args \\ []) do {module, spec} = agent_spec(opts, args) - {:ok, state} = connect([], spec) + {:ok, state} = connect(spec, []) {module, state} end @@ -139,14 +150,14 @@ defmodule Electric.Postgres.MockSchemaLoader do 
{:agent, pid} end - def receive_tx({versions, opts}, %{"txid" => _txid, "txts" => _txts} = row, version) do + def receive_tx(%{opts: opts} = state, %{"txid" => _txid, "txts" => _txts} = row, version) do key = tx_key(row) - {versions, Map.update(opts, :txids, %{key => version}, &Map.put(&1, key, version))} + %{state | opts: Map.update(opts, :txids, %{key => version}, &Map.put(&1, key, version))} end # ignore rows that don't define a txid, txts key - def receive_tx({versions, opts}, _row, _version) do - {versions, opts} + def receive_tx(state, _row, _version) do + state end def electrify_table({__MODULE__, state}, {schema, table}) do @@ -158,9 +169,17 @@ defmodule Electric.Postgres.MockSchemaLoader do {:agent, pid} end - def electrify_table({versions, opts}, {schema, table}) do - {versions, - Map.update(opts, :tables, %{{schema, table} => true}, &Map.put(&1, {schema, table}, true))} + def electrify_table(%{opts: opts} = state, {schema, table}) do + %{ + state + | opts: + Map.update( + opts, + :tables, + %{{schema, table} => true}, + &Map.put(&1, {schema, table}, true) + ) + } end defp tx_key(%{"txid" => txid, "txts" => txts}) do @@ -169,12 +188,12 @@ defmodule Electric.Postgres.MockSchemaLoader do @behaviour SchemaLoader - @impl true - def connect(_conn_config, {:agent, pid}) do + @impl SchemaLoader + def connect({:agent, pid}, _conn_config) do {:ok, {:agent, pid}} end - def connect(conn_config, {:agent, opts, args}) do + def connect({:agent, opts, args}, conn_config) do name = Keyword.get(args, :name) pid = name && GenServer.whereis(name) @@ -182,44 +201,45 @@ defmodule Electric.Postgres.MockSchemaLoader do # use existing agent {:ok, {:agent, name}} else - with {:ok, conn} <- connect(conn_config, opts), + with {:ok, conn} <- connect(opts, conn_config), {:ok, pid} <- Agent.start_link(fn -> conn end, args) do {:ok, {:agent, name || pid}} end end end - def connect(conn_config, opts) do + def connect(opts, conn_config) do {versions, opts} = opts |> Map.new() |> 
Map.pop(:versions, []) notify(opts, {:connect, conn_config}) - {:ok, {versions, opts}} + {:ok, %__MODULE__{versions: versions, opts: opts}} end - @impl true + @impl SchemaLoader def load({:agent, pid}) do Agent.get(pid, &load/1) end - def load({[], opts}) do + def load(%{versions: [], opts: opts}) do notify(opts, :load) {:ok, SchemaLoader.Version.new(nil, Schema.new())} end - def load({[%{version: version, schema: schema} | _versions], opts}) do + def load(%{versions: [%{version: version, schema: schema} | _versions], opts: opts}) do + notify(opts, :load) notify(opts, {:load, version, schema}) {:ok, SchemaLoader.Version.new(version, schema)} end - @impl true + @impl SchemaLoader def load({:agent, pid}, version) do Agent.get(pid, &load(&1, version)) end - def load({versions, opts}, version) do + def load(%{versions: versions, opts: opts}, version) do case Enum.find(versions, &(&1.version == version)) do %Migration{schema: schema} -> notify(opts, {:load, version, schema}) @@ -231,7 +251,7 @@ defmodule Electric.Postgres.MockSchemaLoader do end end - @impl true + @impl SchemaLoader def save({:agent, pid}, version, schema, stmts) do with :ok <- Agent.update(pid, fn state -> @@ -242,24 +262,24 @@ defmodule Electric.Postgres.MockSchemaLoader do end end - def save({versions, opts}, version, schema, stmts) do + def save(%{versions: versions, opts: opts} = state, version, schema, stmts) do notify(opts, {:save, version, schema, stmts}) - {:ok, {[mock_version(version, schema, stmts) | versions], opts}, + {:ok, %{state | versions: [mock_version(version, schema, stmts) | versions]}, SchemaLoader.Version.new(version, schema)} end - @impl true + @impl SchemaLoader def relation_oid({:agent, pid}, type, schema, name) do Agent.get(pid, &relation_oid(&1, type, schema, name)) end - def relation_oid({_versions, %{oid_loader: oid_loader}}, type, schema, name) + def relation_oid(%{opts: %{oid_loader: oid_loader}}, type, schema, name) when is_function(oid_loader, 3) do oid_loader.(type, 
schema, name) end - def relation_oid({_versions, opts}, type, schema, name) do + def relation_oid(%{opts: opts}, type, schema, name) do notify(opts, {:relation_oid, type, schema, name}) with %{} = oids <- get_in(opts, [:oids, type]), @@ -270,22 +290,22 @@ defmodule Electric.Postgres.MockSchemaLoader do end end - @impl true + @impl SchemaLoader def refresh_subscription({:agent, pid}, name) do Agent.get(pid, &refresh_subscription(&1, name)) end - def refresh_subscription({_versions, opts}, name) do + def refresh_subscription(%{opts: opts}, name) do notify(opts, {:refresh_subscription, name}) :ok end - @impl true + @impl SchemaLoader def migration_history({:agent, pid}, after_version) do Agent.get(pid, &migration_history(&1, after_version)) end - def migration_history({versions, opts}, after_version) do + def migration_history(%{versions: versions, opts: opts}, after_version) do notify(opts, {:migration_history, after_version}) migrations = @@ -300,18 +320,18 @@ defmodule Electric.Postgres.MockSchemaLoader do {:ok, migrations} end - @impl true + @impl SchemaLoader def known_migration_version?({:agent, pid}, version) do Agent.get(pid, &known_migration_version?(&1, version)) end - def known_migration_version?({versions, opts}, version) do + def known_migration_version?(%{versions: versions, opts: opts}, version) do notify(opts, {:known_migration_version?, version}) Enum.any?(versions, &(&1.version == version)) end - @impl true + @impl SchemaLoader def internal_schema(_state) do Schema.new() end @@ -320,7 +340,7 @@ defmodule Electric.Postgres.MockSchemaLoader do Agent.get(pid, &electrified_tables/1) end - def electrified_tables({[version | _versions], _opts}) do + def electrified_tables(%{versions: [version | _versions]}) do {:ok, Schema.table_info(version.schema)} end @@ -328,12 +348,12 @@ defmodule Electric.Postgres.MockSchemaLoader do {:ok, []} end - @impl true + @impl SchemaLoader def table_electrified?({:agent, pid}, {schema, name}) do Agent.get(pid, 
&table_electrified?(&1, {schema, name})) end - def table_electrified?({_versions, opts} = state, {schema, name}) do + def table_electrified?(%{opts: opts} = state, {schema, name}) do if Map.get(opts.tables, {schema, name}) do {:ok, true} else @@ -343,12 +363,12 @@ defmodule Electric.Postgres.MockSchemaLoader do end end - @impl true + @impl SchemaLoader def index_electrified?({:agent, pid}, {schema, name}) do Agent.get(pid, &index_electrified?(&1, {schema, name})) end - def index_electrified?({[version | _versions], _opts}, {schema, name}) do + def index_electrified?(%{versions: [version | _versions]}, {schema, name}) do {:ok, Enum.any?( Schema.indexes(version.schema, include_constraints: false), @@ -360,12 +380,16 @@ defmodule Electric.Postgres.MockSchemaLoader do send(parent, {__MODULE__, msg}) end - @impl true + defp notify(_, _msg) do + :ok + end + + @impl SchemaLoader def tx_version({:agent, pid}, row) do Agent.get(pid, &tx_version(&1, row)) end - def tx_version({versions, opts}, %{"txid" => txid, "txts" => txts} = row) do + def tx_version(%{versions: versions, opts: opts}, %{"txid" => txid, "txts" => txts} = row) do notify(opts, {:tx_version, txid, txts}) key = tx_key(row) @@ -392,4 +416,167 @@ defmodule Electric.Postgres.MockSchemaLoader do {:ok, version} end end + + @impl SchemaLoader + def global_permissions({:agent, pid}) do + Agent.get(pid, &global_permissions(&1)) + end + + def global_permissions(%{global_perms: []}) do + {:ok, initial_global_perms()} + end + + def global_permissions(%{global_perms: [perms | _]}) do + {:ok, perms} + end + + @impl SchemaLoader + def global_permissions({:agent, pid}, id) do + Agent.get(pid, &global_permissions(&1, id)) + end + + def global_permissions(%{global_perms: [], opts: opts}, 1) do + notify(opts, :global_permissions) + {:ok, initial_global_perms()} + end + + def global_permissions(%{global_perms: []}, id) do + {:error, "global perms with id #{id} not found"} + end + + def global_permissions(%{global_perms: perms, 
opts: opts}, id) do + notify(opts, {:global_permissions, id}) + + case Enum.find(perms, &(&1.id == id)) do + nil -> {:error, "global perms with id #{id} not found"} + perms -> {:ok, perms} + end + end + + @impl SchemaLoader + def user_permissions({:agent, pid}, user_id) do + Agent.get_and_update(pid, fn state -> + case user_permissions(state, user_id) do + {:ok, state, perms} -> + {{:ok, {:agent, pid}, perms}, state} + + error -> + {error, state} + end + end) + end + + def user_permissions(%{user_perms: user_perms, opts: opts} = state, user_id) do + notify(opts, {:user_permissions, user_id}) + + case(Enum.find(user_perms, &(&1.user_id == user_id))) do + nil -> + id = next_user_perms_id(state) + + {:ok, global} = global_permissions(state) + perms = %SatPerms{id: id, user_id: user_id, rules: global} + {:ok, %{state | user_perms: [perms | user_perms]}, perms} + + perms -> + {:ok, state, perms} + end + end + + @impl SchemaLoader + def user_permissions({:agent, pid}, user_id, perms_id) do + Agent.get(pid, &user_permissions(&1, user_id, perms_id)) + end + + def user_permissions(%{user_perms: user_perms, opts: opts}, user_id, perms_id) do + notify(opts, {:user_permissions, user_id, perms_id}) + + case(Enum.find(user_perms, &(&1.user_id == user_id && &1.id == perms_id))) do + nil -> + {:error, "perms id #{perms_id} not found for user #{user_id}"} + + perms -> + {:ok, perms} + end + end + + @impl SchemaLoader + def save_global_permissions({:agent, pid}, rules) do + Agent.get_and_update(pid, fn state -> + case save_global_permissions(state, rules) do + {:ok, state} -> + {{:ok, {:agent, pid}}, state} + + error -> + {error, state} + end + end) + end + + def save_global_permissions( + %{global_perms: global_perms, opts: opts} = state, + %SatPerms.Rules{} = rules + ) do + notify(opts, {:save_global_permissions, rules}) + + # duplicate all the current user perms with the updated rules, as per the pg version + {user_perms, _id} = + state.user_perms + |> Enum.filter(&(&1.rules.id 
== rules.parent_id)) + |> Enum.uniq_by(& &1.user_id) + |> Enum.map_reduce(next_user_perms_id(state), fn user_perms, id -> + {%{user_perms | id: id, rules: rules}, id + 1} + end) + + {:ok, + %{state | user_perms: user_perms ++ state.user_perms, global_perms: [rules | global_perms]}} + end + + @impl SchemaLoader + def save_user_permissions({:agent, pid}, user_id, roles) do + Agent.get_and_update(pid, fn state -> + case save_user_permissions(state, user_id, roles) do + {:ok, state, perms} -> + {{:ok, {:agent, pid}, perms}, state} + + error -> + {error, state} + end + end) + end + + def save_user_permissions( + %{user_perms: user_perms, opts: opts} = state, + user_id, + %SatPerms.Roles{} = perms + ) do + notify(opts, {:save_user_permissions, user_id, perms}) + %{rules_id: rules_id, parent_id: parent_id, roles: roles} = perms + + global = + cond do + rules_id == 1 -> initial_global_perms() + global = Enum.find(state.global_perms, &(&1.id == rules_id)) -> global + true -> nil + end + + if global do + if parent_id && !Enum.find(user_perms, &(&1.id == parent_id)) do + {:error, "invalid parent permissions id #{parent_id}"} + else + id = next_user_perms_id(state) + + perms = %SatPerms{id: id, user_id: user_id, rules: global, roles: roles} + {:ok, %{state | user_perms: [perms | user_perms]}, perms} + end + else + {:error, "invalid global permissions id #{rules_id}"} + end + end + + defp next_user_perms_id(%{user_perms: []}), do: 1 + defp next_user_perms_id(%{user_perms: [%{id: id} | _]}), do: id + 1 + + defp initial_global_perms do + %SatPerms.Rules{id: 1} + end end diff --git a/components/electric/test/support/permissions_helpers.ex b/components/electric/test/support/permissions_helpers.ex index 481de4ea6b..05abb4c882 100644 --- a/components/electric/test/support/permissions_helpers.ex +++ b/components/electric/test/support/permissions_helpers.ex @@ -49,9 +49,10 @@ defmodule ElectricTest.PermissionsHelpers do Permissions.new(auth, Transient.name()) end - def update(perms, 
ddlx, roles) do + def update(perms, schema_version, ddlx, roles) do Permissions.update( perms, + schema_version, to_rules(ddlx), roles ) @@ -111,7 +112,9 @@ defmodule ElectricTest.PermissionsHelpers do end defmodule Chgs do + alias Electric.DDLX.Command alias Electric.Replication.Changes + alias Electric.Postgres.Extension def tx(changes, attrs \\ []) do %Changes.Transaction{changes: changes} @@ -137,6 +140,23 @@ defmodule ElectricTest.PermissionsHelpers do |> put_change_attrs(attrs) end + def ddlx(attrs) when is_list(attrs) do + attrs + |> Command.ddlx() + |> ddlx() + end + + def ddlx(ddlx) do + bytes = Protox.encode!(ddlx) |> IO.iodata_to_binary() + + %Changes.NewRecord{ + relation: Extension.ddlx_relation(), + record: %{ + "ddlx" => bytes + } + } + end + defp put_tx_attrs(tx, attrs) do Map.put(tx, :lsn, LSN.new(attrs[:lsn])) end @@ -151,17 +171,20 @@ defmodule ElectricTest.PermissionsHelpers do defmodule Roles do alias Electric.Satellite.SatPerms, as: P - def role(role_name) do - %P.Role{role: role_name} + def role(role_name, assign_id) do + %P.Role{role: role_name, assign_id: assign_id} end - def role(role_name, table, id, attrs \\ []) do - %P.Role{ - assign_id: attrs[:assign_id], - role: role_name, - user_id: Keyword.get(attrs, :user_id, Auth.user_id()), - scope: %P.Scope{table: relation(table), id: List.wrap(id)} - } + def role(role_name, table, id, assign_id, attrs \\ []) do + struct( + %P.Role{ + assign_id: assign_id, + role: role_name, + user_id: Keyword.get(attrs, :user_id, Auth.user_id()), + scope: %P.Scope{table: relation(table), id: List.wrap(id)} + }, + attrs + ) end defp relation({schema, name}) do @@ -373,11 +396,6 @@ defmodule ElectricTest.PermissionsHelpers do end end - @impl Electric.Satellite.Permissions.Graph - def relation_path({_graph, fks}, root, relation) do - fk_path(fks, root, relation) - end - defp fk_path(_fks, root, root) do [root] end @@ -391,19 +409,26 @@ defmodule ElectricTest.PermissionsHelpers do 
Electric.Utils.inspect_relation(relation) end - def perms_build(grants, roles, attrs \\ []) do + def perms_build(cxt, grants, roles, attrs \\ []) do + %{schema_version: schema_version} = cxt + attrs |> Perms.new() - |> Perms.update(grants, roles) + |> Perms.update(schema_version, grants, roles) end defmodule Proto do + alias Electric.DDLX.Command alias Electric.Satellite.SatPerms def table(schema \\ "public", name) do %SatPerms.Table{schema: schema, name: name} end + def scope(schema \\ "public", name) do + table(schema, name) + end + def role(name) do %SatPerms.RoleName{role: {:application, name}} end @@ -415,5 +440,29 @@ defmodule ElectricTest.PermissionsHelpers do def anyone() do %SatPerms.RoleName{role: {:predefined, :ANYONE}} end + + def assign(attrs) do + SatPerms.Assign |> struct(attrs) |> Command.put_id() + end + + def unassign(attrs) do + SatPerms.Unassign |> struct(attrs) |> Command.put_id() + end + + def grant(attrs) do + SatPerms.Grant |> struct(attrs) |> Command.put_id() + end + + def revoke(attrs) do + SatPerms.Revoke |> struct(attrs) |> Command.put_id() + end + + def sqlite(stmt) do + %SatPerms.Sqlite{stmt: stmt} |> Command.put_id() + end + + def encode(struct) do + Protox.encode!(struct) |> IO.iodata_to_binary() + end end end diff --git a/e2e/tests/06.02_permissions_change_propagation.lux b/e2e/tests/06.02_permissions_change_propagation.lux new file mode 100644 index 0000000000..fc08d8dc00 --- /dev/null +++ b/e2e/tests/06.02_permissions_change_propagation.lux @@ -0,0 +1,96 @@ +[doc Permissions changes are propagated to client connection] +[include _shared.luxinc] + +[invoke setup] + +[global migration_version_1=20231109154018] +[global migration_version_2=20240226114300] +[global user_id1=95f21e62-4b90-49c3-874a-174eb17e58cf] +[global user_id2=31377df9-c659-493e-b26f-1ce5fbb0b2df] +[global session_id=004d3e42-d072-4a60-9513-93ddd843d478] +[global project_id=99adf0a5-b3c6-45d7-9986-582e76db4556] + + +[shell proxy_1] + [invoke log "run migration 
$migration_version_1 on postgres"]
+  """!
+  BEGIN;
+  CALL electric.migration_version('$migration_version_1');
+  CREATE TABLE "projects" (
+    id uuid NOT NULL PRIMARY KEY
+  );
+  CREATE TABLE "issues" (
+    id uuid NOT NULL PRIMARY KEY,
+    project_id uuid NOT NULL REFERENCES projects (id)
+  );
+  CREATE TABLE "users" (
+    id uuid NOT NULL PRIMARY KEY
+  );
+  CREATE TABLE "project_memberships" (
+    id uuid NOT NULL PRIMARY KEY,
+    project_id uuid NOT NULL REFERENCES projects (id),
+    user_id uuid NOT NULL REFERENCES users (id),
+    role text NOT NULL
+  );
+
+  INSERT INTO users (id) VALUES ('$user_id1');
+  INSERT INTO users (id) VALUES ('$user_id2');
+  INSERT INTO projects (id) VALUES ('$project_id');
+
+  ALTER TABLE "projects" ENABLE ELECTRIC;
+  ALTER TABLE "issues" ENABLE ELECTRIC;
+  ALTER TABLE "users" ENABLE ELECTRIC;
+  ALTER TABLE "project_memberships" ENABLE ELECTRIC;
+  COMMIT;
+  """
+  ?$psql
+
+
+[shell electric]
+  ?? [info] Applying migration $migration_version_1
+
+[newshell user_1_ws1]
+  -$fail_pattern
+  [invoke start_elixir_test 1]
+  [invoke client_session $user_id1 $session_id]
+
+  !alias Electric.Satellite.{SatRelation, SatRelationColumn, SatOpInsert, SatOpUpdate, SatOpRow}
+  ?$eprompt
+
+[shell proxy_1]
+  [invoke log "run migration $migration_version_2 on postgres"]
+  """!
+ BEGIN; + CALL electric.migration_version('$migration_version_2'); + ELECTRIC ASSIGN (projects, project_memberships.role) TO project_memberships.user_id; + ELECTRIC GRANT ALL ON projects TO (projects, 'member'); + ELECTRIC GRANT ALL ON issues TO (projects, 'member'); + COMMIT; + """ + ?$psql + +[shell electric] + ?user_id=$user_id1 .+ Global permissions updated for connection + +[shell pg_1] + !INSERT INTO project_memberships (id, project_id, user_id, role) VALUES ('c197a4ef-0f22-4af1-acb1-bf7200e64900', '$project_id', '$user_id1', 'member'); + ?$psql + +[shell electric] + ?user_id=$user_id1 .+ User permissions updated for connection + +## role for non-connected user isn't being accepted by connection +[shell pg_1] + !INSERT INTO project_memberships (id, project_id, user_id, role) VALUES ('386fff23-181e-4386-85a2-9a430795a23c', '$project_id', '$user_id2', 'member'); + ?$psql + +[shell electric] + -user_id=$user_id2 .+ User permissions updated for connection + ?%Electric.Replication.Changes.NewRecord{relation: {"public", "project_memberships"}, \ + record: %{"id" => "386fff23-181e-4386-85a2-9a430795a23c", \ + "project_id" => "99adf0a5-b3c6-45d7-9986-582e76db4556", \ + "role" => "member", \ + "user_id" => "$user_id2"} + +[cleanup] + [invoke teardown] diff --git a/e2e/tests/compose.yaml b/e2e/tests/compose.yaml index 90872355c9..5c5a50a60c 100644 --- a/e2e/tests/compose.yaml +++ b/e2e/tests/compose.yaml @@ -19,6 +19,7 @@ services: ELECTRIC_WRITE_TO_PG_MODE: "${ELECTRIC_WRITE_TO_PG_MODE:-logical_replication}" LOGICAL_PUBLISHER_HOST: electric_1 PG_PROXY_LOG_LEVEL: info + ELECTRIC_FEATURES: "proxy_ddlx_grant=true:proxy_ddlx_assign=true" ports: - "5133:5133" # proxy access diff --git a/protocol/satellite.proto b/protocol/satellite.proto index 95442a966c..d48b5965c6 100644 --- a/protocol/satellite.proto +++ b/protocol/satellite.proto @@ -564,7 +564,7 @@ message SatPerms { } message Scope { Table table = 1; - string id = 2; + repeated string id = 2; } enum Privilege { 
DELETE = 0; @@ -596,6 +596,7 @@ message SatPerms { optional string check = 8; } message Revoke { + string id = 1; Table table = 2; RoleName role = 3; Privilege privilege = 4; @@ -612,6 +613,7 @@ message SatPerms { optional string if = 7; } message Unassign { + string id = 1; Table table = 2; optional string user_column = 3; optional string role_column = 4; @@ -624,18 +626,12 @@ message SatPerms { } message Role { - string id = 1; + repeated string row_id = 1; string role = 2; string user_id = 3; string assign_id = 4; optional Scope scope = 5; } - // split the rules and roles info into distinct messages so they can be - // serialized separately - message Rules { - repeated Grant grants = 1; - repeated Assign assigns = 2; - } message DDLX { repeated Grant grants = 1; repeated Revoke revokes = 2; @@ -643,11 +639,27 @@ message SatPerms { repeated Unassign unassigns = 4; repeated Sqlite sqlite = 5; } + + // split the rules and roles info into distinct messages so they can be + // serialized separately + message Rules { + uint64 id = 1; + optional uint64 parent_id = 2; + repeated Grant grants = 3; + repeated Assign assigns = 4; + } + message Roles { - repeated Role roles = 2; + uint64 id = 1; + optional uint64 parent_id = 2; + uint64 rules_id = 3; + repeated Role roles = 4; } + // this id is the id of the user permissions, this struct is the user + // permissions fused with the global permissions at that point in time int64 id = 1; + string user_id = 2; Rules rules = 3; - Roles roles = 4; + repeated Role roles = 4; }