diff --git a/.clang-format b/.clang-format index 6959bdf7..ad9bd0d2 100644 --- a/.clang-format +++ b/.clang-format @@ -1,4 +1,4 @@ -BasedOnStyle: LLVM +BasedOnStyle: LLVM IndentWidth: 4 BinPackArguments: false diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..a3d6b56c --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,39 @@ +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: trailing-whitespace + exclude: ^src/test/resources/.*\.(htm|txt)$ + - id: check-yaml + - id: check-merge-conflict + - id: check-added-large-files + args: ['--maxkb=500'] + - id: mixed-line-ending + args: ['--fix=lf'] + +- repo: https://github.com/humitos/mirrors-autoflake.git + rev: v1.1 + hooks: + - id: autoflake + args: ['--in-place', '--remove-all-unused-imports'] + +- repo: https://github.com/pre-commit/mirrors-isort + rev: v5.9.3 + hooks: + - id: isort + args: ['--multi-line=3', '--trailing-comma', '--force-grid-wrap=0', '--use-parentheses', '--line-width=88'] + +- repo: https://github.com/ambv/black + rev: 21.8b0 + hooks: + - id: black + language_version: python3.8 + args: ['--line-length=88'] + +- repo: https://github.com/Lucas-C/pre-commit-hooks + rev: v1.1.10 + hooks: + - id: remove-tabs + name: Tabs-to-Spaces + args: [--whitespaces-count, '4'] + types: [sql] diff --git a/README.md b/README.md index fb63deeb..b1f0247a 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ --- -## Summary +## Summary pg_graphql is an experimental PostgreSQL extension adding support for GraphQL. @@ -44,14 +44,14 @@ Set up an interactive psql prompt with the extension installed using docker # Build image docker build -t pg_graphql -f Dockerfile . -# Run container +# Run container docker run --rm --name pg_gql -p 5085:5432 -d -e POSTGRES_DB=gqldb -e POSTGRES_PASSWORD=password -e POSTGRES_USER=postgres -d pg_graphql # Attach to container docker exec -it pg_gql psql -U postgres gqldb ``` -Now we'll create the extension, and create a test table with some data to query +Now we'll create the extension, and create a test table with some data to query ```sql gqldb= create extension pg_graphql; @@ -60,7 +60,7 @@ CREATE EXTENSION gqldb= create table book(id int primary key, title text); CREATE TABLE -gqldb= insert into book(id, title) values (1, 'book 1'); +gqldb= insert into book(id, title) values (1, 'book 1'); INSERT 0 1 ``` @@ -74,7 +74,7 @@ query { } } $$); - execute + execute ---------------------------------- {"data": {"book": {"book_id": 1, "title": "book 1"}}} ``` @@ -119,7 +119,7 @@ $$); Requires: - Python 3.6+ -- Docker +- Docker ```shell pip intall -e . 
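As a quick illustration of the `gql.execute` call quoted from the README above, an aliased query against the same `book` table might look like the sketch below. This is an assumption pieced together from the README quick-start and the alias handling exercised in `test/test_execute.py` (the `xXx` alias and the expected output are inferred, not captured from a running extension):

```sql
-- Sketch: top-level field alias with gql.execute (assumes the README's
-- `book` table exists and `create extension pg_graphql` has already run)
select gql.execute($$
query {
  xXx: book(id: 1) {
    title
  }
}
$$);
-- expected shape (assumed): {"data": {"xXx": {"title": "book 1"}}}
```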
diff --git a/setup.py b/setup.py index a8eeaaa2..1cfc574f 100644 --- a/setup.py +++ b/setup.py @@ -36,8 +36,8 @@ def check_python_version(): setuptools.setup( name="pg_graphql", version="0.0.1", - #packages=setuptools.find_packages("src/python", exclude=("tests",)), - #package_dir={"": "src/python"}, + # packages=setuptools.find_packages("src/python", exclude=("tests",)), + # package_dir={"": "src/python"}, classifiers=[ "Development Status :: 4 - Beta", "Natural Language :: English", diff --git a/sql/pg_graphql--0.1.sql b/sql/pg_graphql--0.1.sql index 53fe9f6e..9b3a3341 100644 --- a/sql/pg_graphql--0.1.sql +++ b/sql/pg_graphql--0.1.sql @@ -16,10 +16,10 @@ as $$ /* Recursively remove a key from a jsonb object by name */ - select - case - when jsonb_typeof(body) = 'object' then - ( + select + case + when jsonb_typeof(body) = 'object' then + ( select jsonb_object_agg(key_, gql._recursive_strip_key(value_)) from @@ -29,8 +29,8 @@ Recursively remove a key from a jsonb object by name limit 1 ) - when jsonb_typeof(body) = 'array' then - ( + when jsonb_typeof(body) = 'array' then + ( select jsonb_agg(gql._recursive_strip_key(value_)) from @@ -40,7 +40,7 @@ Recursively remove a key from a jsonb object by name ) else body - end; + end; $$; @@ -94,7 +94,7 @@ as $$ ] } */ - select + select gql._recursive_strip_key( body:=gql._parse(query)::jsonb, key:='loc' @@ -160,7 +160,7 @@ declare -- AST for the first OperationDescription ast_op jsonb; ast_op_kind text; - + sql_template text; -- Operation @@ -174,7 +174,7 @@ declare kind text; name_kind text; name_value text; - alias text; + alias text; -- Extracted from arguments filters jsonb; @@ -201,7 +201,7 @@ declare work_ix int; result jsonb; - + begin ast = gql.parse(query); @@ -232,7 +232,7 @@ begin -- Table name -- TODO sanitize - + */ result_alias = gql.get_alias(selection); @@ -260,7 +260,7 @@ begin select_clause = ( select_clause || quote_ident(field_col) - || ' as ' + || ' as ' || coalesce(quote_ident(field_alias), quote_ident(field_col)) ); end loop; @@ -288,7 +288,7 @@ begin ); end loop; - execute $c$ + execute $c$ with rec as ( select $c$ || select_clause || $c$ from $c$ || quote_ident(table_name) || $c$ @@ -324,74 +324,74 @@ create type gql.cardinality as enum ('ONE', 'MANY'); create function gql.to_regclass(schema_ text, name_ text) - returns regclass - language sql - immutable + returns regclass + language sql + immutable as $$ select (quote_ident(schema_) || '.' 
|| quote_ident(name_))::regclass; $$; create function gql.to_table_name(regclass) - returns text - language sql - immutable + returns text + language sql + immutable as $$ select coalesce(nullif(split_part($1::text, '.', 2), ''), $1::text) $$; create function gql.to_pkey_column_names(regclass) - returns text[] - language sql - stable + returns text[] + language sql + stable as $$ - select - coalesce(array_agg(pga.attname), '{}') - from - pg_index i - join pg_attribute pga - on pga.attrelid = i.indrelid - and pga.attnum = any(i.indkey) - where - i.indrelid = $1::regclass - and i.indisprimary; + select + coalesce(array_agg(pga.attname), '{}') + from + pg_index i + join pg_attribute pga + on pga.attrelid = i.indrelid + and pga.attnum = any(i.indkey) + where + i.indrelid = $1::regclass + and i.indisprimary; $$; create function gql.to_pascal_case(text) - returns text - language sql - immutable + returns text + language sql + immutable as $$ -select - string_agg(initcap(part), '') +select + string_agg(initcap(part), '') from - unnest(string_to_array($1, '_')) x(part) + unnest(string_to_array($1, '_')) x(part) $$; create function gql.to_camel_case(text) - returns text - language sql - immutable + returns text + language sql + immutable as $$ -select - string_agg( - case - when part_ix = 1 then part - else initcap(part) - end, '') +select + string_agg( + case + when part_ix = 1 then part + else initcap(part) + end, '') from - unnest(string_to_array($1, '_')) with ordinality x(part, part_ix) + unnest(string_to_array($1, '_')) with ordinality x(part, part_ix) $$; create table gql.entity ( - --id integer generated always as identity primary key, - entity regclass primary key, - is_disabled boolean default false + --id integer generated always as identity primary key, + entity regclass primary key, + is_disabled boolean default false ); @@ -400,23 +400,23 @@ create or replace view gql.relationship as select gql.to_regclass(table_schema::text, table_name::text) entity, constraint_name::text, - table_schema::text as table_schema, + table_schema::text as table_schema, array_agg(column_name::text) column_names from - gql.entity ge - join information_schema.constraint_column_usage ccu - on ge.entity = gql.to_regclass(table_schema::text, table_name::text) + gql.entity ge + join information_schema.constraint_column_usage ccu + on ge.entity = gql.to_regclass(table_schema::text, table_name::text) group by table_schema, table_name, constraint_name ), - directional as ( - select + directional as ( + select tc.constraint_name::text, - gql.to_regclass(tc.table_schema::text, tc.table_name::text) local_entity, + gql.to_regclass(tc.table_schema::text, tc.table_name::text) local_entity, array_agg(kcu.column_name) local_columns, 'MANY'::gql.cardinality as local_cardinality, - ccu.entity foreign_entity, + ccu.entity foreign_entity, ccu.column_names::text[] as foreign_columns, 'ONE'::gql.cardinality as foreign_cardinality from @@ -442,11 +442,11 @@ create or replace view gql.relationship as union all select constraint_name, - foreign_entity as local_entity, - foreign_columns as local_columns, + foreign_entity as local_entity, + foreign_columns as local_columns, foreign_cardinality as local_cardinality, - local_entity as foreign_entity, - local_columns as foreign_columns, + local_entity as foreign_entity, + local_columns as foreign_columns, local_cardinality as foreign_cardinality from directional; @@ -454,131 +454,131 @@ create or replace view gql.relationship as create type gql.type_type as enum('Scalar', 'Node', 
'Edge', 'Connection', 'PageInfo', 'Object', 'Enum'); create table gql.type ( - id integer generated always as identity primary key, - name text not null unique, - type_type gql.type_type not null, - entity regclass references gql.entity(entity), - is_disabled boolean not null default false, - is_builtin boolean not null default false, - enum_variants text[], + id integer generated always as identity primary key, + name text not null unique, + type_type gql.type_type not null, + entity regclass references gql.entity(entity), + is_disabled boolean not null default false, + is_builtin boolean not null default false, + enum_variants text[], check ( type_type != 'Enum' and enum_variants is null or type_type = 'Enum' and enum_variants is not null ), - unique (type_type, entity) + unique (type_type, entity) ); create function gql.type_id_by_name(text) - returns int - language sql + returns int + language sql as $$ select id from gql.type where name = $1; $$; create table gql.field ( - id integer generated always as identity primary key, - parent_type_id integer not null references gql.type(id), - type_id integer not null references gql.type(id), - name text not null, - is_not_null boolean, - is_array boolean default false, - is_array_not_null boolean, - is_disabled boolean default false, - -- TODO trigger check column name only non-null when type is scalar - column_name text, - -- Relationships - local_columns text[], - foreign_columns text[], - -- Names must be unique on each type - unique(parent_type_id, name), - -- Upsert key - unique(parent_type_id, column_name), - -- is_array_not_null only set if is_array is true - check ( - (not is_array and is_array_not_null is null) - or (is_array and is_array_not_null is not null) - ), - -- Only column fields and total can be disabled - check ( - not is_disabled - or column_name is not null - or name = 'totalCount' - ) + id integer generated always as identity primary key, + parent_type_id integer not null references gql.type(id), + type_id integer not null references gql.type(id), + name text not null, + is_not_null boolean, + is_array boolean default false, + is_array_not_null boolean, + is_disabled boolean default false, + -- TODO trigger check column name only non-null when type is scalar + column_name text, + -- Relationships + local_columns text[], + foreign_columns text[], + -- Names must be unique on each type + unique(parent_type_id, name), + -- Upsert key + unique(parent_type_id, column_name), + -- is_array_not_null only set if is_array is true + check ( + (not is_array and is_array_not_null is null) + or (is_array and is_array_not_null is not null) + ), + -- Only column fields and total can be disabled + check ( + not is_disabled + or column_name is not null + or name = 'totalCount' + ) ); create function gql.sql_type_to_gql_type(sql_type text) - returns int - language sql + returns int + language sql as $$ - -- SQL type from information_schema.columns.data_type - select - case - when sql_type like 'int%' then gql.type_id_by_name('Int') - when sql_type like 'bool%' then gql.type_id_by_name('Boolean') - when sql_type like 'float%' then gql.type_id_by_name('Float') - when sql_type like 'numeric%' then gql.type_id_by_name('Float') - when sql_type like 'json%' then gql.type_id_by_name('JSON') - when sql_type = 'uuid' then gql.type_id_by_name('UUID') - when sql_type like 'date%' then gql.type_id_by_name('DateTime') - when sql_type like 'timestamp%' then gql.type_id_by_name('DateTime') - else gql.type_id_by_name('String') - end; + -- SQL type from 
information_schema.columns.data_type + select + case + when sql_type like 'int%' then gql.type_id_by_name('Int') + when sql_type like 'bool%' then gql.type_id_by_name('Boolean') + when sql_type like 'float%' then gql.type_id_by_name('Float') + when sql_type like 'numeric%' then gql.type_id_by_name('Float') + when sql_type like 'json%' then gql.type_id_by_name('JSON') + when sql_type = 'uuid' then gql.type_id_by_name('UUID') + when sql_type like 'date%' then gql.type_id_by_name('DateTime') + when sql_type like 'timestamp%' then gql.type_id_by_name('DateTime') + else gql.type_id_by_name('String') + end; $$; create function gql.build_schema() - returns void - language plpgsql + returns void + language plpgsql as $$ begin - truncate table gql.field cascade; - truncate table gql.type cascade; - truncate table gql.entity cascade; - - insert into gql.entity(entity, is_disabled) - select - gql.to_regclass(schemaname, tablename) entity, - false is_disabled - from - pg_tables pgt - where - schemaname not in ('information_schema', 'pg_catalog', 'gql'); - - - -- Constants - insert into gql.type (name, type_type, is_builtin) - values - ('ID', 'Scalar', true), - ('Int', 'Scalar', true), - ('Float', 'Scalar', true), - ('String', 'Scalar', true), - ('Boolean', 'Scalar', true), - ('DateTime', 'Scalar', false), - ('BigInt', 'Scalar', false), - ('UUID', 'Scalar', false), - ('JSON', 'Scalar', false), - ('Query', 'Object', false), - ('Mutation', 'Object', false), - ('PageInfo', 'PageInfo', false); - -- Node Types - -- TODO snake case to camel case to handle underscores - insert into gql.type (name, type_type, entity, is_disabled, is_builtin) - select gql.to_pascal_case(gql.to_table_name(entity)), 'Node', entity, false, false - from gql.entity; - -- Edge Types - insert into gql.type (name, type_type, entity, is_disabled, is_builtin) - select gql.to_pascal_case(gql.to_table_name(entity)) || 'Edge', 'Edge', entity, false, false - from gql.entity; - -- Connection Types - insert into gql.type (name, type_type, entity, is_disabled, is_builtin) - select gql.to_pascal_case(gql.to_table_name(entity)) || 'Connection', 'Connection', entity, false, false - from gql.entity; + truncate table gql.field cascade; + truncate table gql.type cascade; + truncate table gql.entity cascade; + + insert into gql.entity(entity, is_disabled) + select + gql.to_regclass(schemaname, tablename) entity, + false is_disabled + from + pg_tables pgt + where + schemaname not in ('information_schema', 'pg_catalog', 'gql'); + + + -- Constants + insert into gql.type (name, type_type, is_builtin) + values + ('ID', 'Scalar', true), + ('Int', 'Scalar', true), + ('Float', 'Scalar', true), + ('String', 'Scalar', true), + ('Boolean', 'Scalar', true), + ('DateTime', 'Scalar', false), + ('BigInt', 'Scalar', false), + ('UUID', 'Scalar', false), + ('JSON', 'Scalar', false), + ('Query', 'Object', false), + ('Mutation', 'Object', false), + ('PageInfo', 'PageInfo', false); + -- Node Types + -- TODO snake case to camel case to handle underscores + insert into gql.type (name, type_type, entity, is_disabled, is_builtin) + select gql.to_pascal_case(gql.to_table_name(entity)), 'Node', entity, false, false + from gql.entity; + -- Edge Types + insert into gql.type (name, type_type, entity, is_disabled, is_builtin) + select gql.to_pascal_case(gql.to_table_name(entity)) || 'Edge', 'Edge', entity, false, false + from gql.entity; + -- Connection Types + insert into gql.type (name, type_type, entity, is_disabled, is_builtin) + select 
gql.to_pascal_case(gql.to_table_name(entity)) || 'Connection', 'Connection', entity, false, false + from gql.entity; -- Enum Types - insert into gql.type (name, type_type, is_disabled, is_builtin, enum_variants) + insert into gql.type (name, type_type, is_disabled, is_builtin, enum_variants) select gql.to_pascal_case(t.typname) as name, 'Enum' as type_type, @@ -598,164 +598,163 @@ begin t.typname; - -- PageInfo - insert into gql.field(parent_type_id, type_id, name, is_not_null, is_array, is_array_not_null, column_name) - values - (gql.type_id_by_name('PageInfo'), gql.type_id_by_name('Boolean'), 'hasPreviousPage', true, false, null, null), - (gql.type_id_by_name('PageInfo'), gql.type_id_by_name('Boolean'), 'hasNextPage', true, false, null, null), - (gql.type_id_by_name('PageInfo'), gql.type_id_by_name('String'), 'startCursor', true, false, null, null), - (gql.type_id_by_name('PageInfo'), gql.type_id_by_name('String'), 'endCursor', true, false, null, null); - - -- Edges - insert into gql.field(parent_type_id, type_id, name, is_not_null, is_array, is_array_not_null, column_name) - -- Edge.node: - select - edge.id parent_type_id, - node.id type_id, - 'node' as name, - false is_not_null, - false is_array, - null::boolean is_array_not_null, - null::text as column_name - from - gql.type edge - join gql.type node - on edge.entity = node.entity - where - edge.type_type = 'Edge' - and node.type_type = 'Node' - union all - -- Edge.cursor - select - edge.id, gql.type_id_by_name('String'), 'cursor', true, false, null, null - from - gql.type edge - where - edge.type_type = 'Edge'; - - -- Connection - insert into gql.field(parent_type_id, type_id, name, is_not_null, is_array, is_array_not_null, column_name) - -- Connection.edges: - select - conn.id parent_type_id, - edge.id type_id, - 'edges' as name, - false is_not_null, - true is_array, - false::boolean is_array_not_null, - null::text as column_name - from - gql.type conn - join gql.type edge - on conn.entity = edge.entity - where - conn.type_type = 'Connection' - and edge.type_type = 'Edge' - union all - -- Connection.pageInfo - select conn.id, gql.type_id_by_name('PageInfo'), 'pageInfo', true, false, null, null - from gql.type conn - where conn.type_type = 'Connection' - union all - -- Connection.totalCount (disabled by default) - select conn.id, gql.type_id_by_name('Int'), 'totalCount', true, false, null, null - from gql.type conn - where conn.type_type = 'Connection'; - - - -- Node - insert into gql.field(parent_type_id, type_id, name, is_not_null, is_array, is_array_not_null, column_name) - -- Node. - select - gt.id parent_type_id, - case - -- Detect ID! 
types using pkey info, restricted by types - when c.column_name = 'id' and array[c.column_name::text] = gql.to_pkey_column_names(ent.entity) - then gql.type_id_by_name('ID') - -- substring removes the underscore prefix from array types - when c.data_type = 'ARRAY' then gql.sql_type_to_gql_type(substring(udt_name, 2, 100)) - else gql.sql_type_to_gql_type(c.data_type) - end type_id, - gql.to_camel_case(c.column_name::text) as name, - case when c.data_type = 'ARRAY' then false else c.is_nullable = 'NO' end as is_not_null, - case when c.data_type = 'ARRAY' then true else false end is_array, - case when c.data_type = 'ARRAY' then c.is_nullable = 'NO' else null end is_array_not_null, - c.column_name::text as column_name - from - gql.entity ent - join gql.type gt - on ent.entity = gt.entity - join information_schema.role_column_grants rcg - on ent.entity = gql.to_regclass(rcg.table_schema, rcg.table_name) - join information_schema.columns c - on rcg.table_schema = c.table_schema - and rcg.table_name = c.table_name - and rcg.column_name = c.column_name - - where - gt.type_type = 'Node' - -- INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER - and rcg.privilege_type = 'SELECT' - and ( - -- Use access level of current role - rcg.grantee = current_setting('role') - -- If superuser, allow everything - or current_setting('role') = 'none' - ) - order by - ent.entity, c.ordinal_position; - - -- Node - insert into gql.field(parent_type_id, type_id, name, is_not_null, is_array, is_array_not_null, local_columns, foreign_columns) - -- Node. - select - node.id parent_type_id, - conn.id type_id, - case - -- owner_id -> owner - when ( - array_length(rel.local_columns, 1) = 1 - and rel.local_columns[1] like '%_id' - and rel.foreign_cardinality = 'ONE' - and gql.to_camel_case(left(rel.local_columns[1], -3)) not in (select name from gql.field where parent_type_id = node.id) - ) then gql.to_camel_case(left(rel.local_columns[1], -3)) - when ( - rel.foreign_cardinality = 'ONE' - and gql.to_camel_case(gql.to_table_name(rel.foreign_entity)) not in (select name from gql.field where parent_type_id = node.id) - ) then gql.to_camel_case(gql.to_table_name(rel.foreign_entity)) - when ( - rel.foreign_cardinality = 'MANY' - and gql.to_camel_case(gql.to_table_name(rel.foreign_entity)) not in (select name from gql.field where parent_type_id = node.id) - ) then gql.to_camel_case(gql.to_table_name(rel.foreign_entity)) || 's' - else gql.to_camel_case(gql.to_table_name(rel.foreign_entity)) || 'RequiresNameOverride' - end, - -- todo - false as is_not_null, - case - when rel.foreign_cardinality = 'MANY' then true - else false - end as is_array, - case - when rel.foreign_cardinality = 'MANY' then false - else null - end as is_array_not_null, - rel.local_columns, - rel.foreign_columns - from - gql.type node - join gql.relationship rel - on node.entity = rel.local_entity - join gql.type conn - on conn.entity = rel.foreign_entity - where - node.type_type = 'Node' - and conn.type_type = 'Connection' - order by - rel.local_entity, local_columns; + -- PageInfo + insert into gql.field(parent_type_id, type_id, name, is_not_null, is_array, is_array_not_null, column_name) + values + (gql.type_id_by_name('PageInfo'), gql.type_id_by_name('Boolean'), 'hasPreviousPage', true, false, null, null), + (gql.type_id_by_name('PageInfo'), gql.type_id_by_name('Boolean'), 'hasNextPage', true, false, null, null), + (gql.type_id_by_name('PageInfo'), gql.type_id_by_name('String'), 'startCursor', true, false, null, null), + (gql.type_id_by_name('PageInfo'), 
gql.type_id_by_name('String'), 'endCursor', true, false, null, null); + + -- Edges + insert into gql.field(parent_type_id, type_id, name, is_not_null, is_array, is_array_not_null, column_name) + -- Edge.node: + select + edge.id parent_type_id, + node.id type_id, + 'node' as name, + false is_not_null, + false is_array, + null::boolean is_array_not_null, + null::text as column_name + from + gql.type edge + join gql.type node + on edge.entity = node.entity + where + edge.type_type = 'Edge' + and node.type_type = 'Node' + union all + -- Edge.cursor + select + edge.id, gql.type_id_by_name('String'), 'cursor', true, false, null, null + from + gql.type edge + where + edge.type_type = 'Edge'; + + -- Connection + insert into gql.field(parent_type_id, type_id, name, is_not_null, is_array, is_array_not_null, column_name) + -- Connection.edges: + select + conn.id parent_type_id, + edge.id type_id, + 'edges' as name, + false is_not_null, + true is_array, + false::boolean is_array_not_null, + null::text as column_name + from + gql.type conn + join gql.type edge + on conn.entity = edge.entity + where + conn.type_type = 'Connection' + and edge.type_type = 'Edge' + union all + -- Connection.pageInfo + select conn.id, gql.type_id_by_name('PageInfo'), 'pageInfo', true, false, null, null + from gql.type conn + where conn.type_type = 'Connection' + union all + -- Connection.totalCount (disabled by default) + select conn.id, gql.type_id_by_name('Int'), 'totalCount', true, false, null, null + from gql.type conn + where conn.type_type = 'Connection'; + + + -- Node + insert into gql.field(parent_type_id, type_id, name, is_not_null, is_array, is_array_not_null, column_name) + -- Node. + select + gt.id parent_type_id, + case + -- Detect ID! types using pkey info, restricted by types + when c.column_name = 'id' and array[c.column_name::text] = gql.to_pkey_column_names(ent.entity) + then gql.type_id_by_name('ID') + -- substring removes the underscore prefix from array types + when c.data_type = 'ARRAY' then gql.sql_type_to_gql_type(substring(udt_name, 2, 100)) + else gql.sql_type_to_gql_type(c.data_type) + end type_id, + gql.to_camel_case(c.column_name::text) as name, + case when c.data_type = 'ARRAY' then false else c.is_nullable = 'NO' end as is_not_null, + case when c.data_type = 'ARRAY' then true else false end is_array, + case when c.data_type = 'ARRAY' then c.is_nullable = 'NO' else null end is_array_not_null, + c.column_name::text as column_name + from + gql.entity ent + join gql.type gt + on ent.entity = gt.entity + join information_schema.role_column_grants rcg + on ent.entity = gql.to_regclass(rcg.table_schema, rcg.table_name) + join information_schema.columns c + on rcg.table_schema = c.table_schema + and rcg.table_name = c.table_name + and rcg.column_name = c.column_name + + where + gt.type_type = 'Node' + -- INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER + and rcg.privilege_type = 'SELECT' + and ( + -- Use access level of current role + rcg.grantee = current_setting('role') + -- If superuser, allow everything + or current_setting('role') = 'none' + ) + order by + ent.entity, c.ordinal_position; + + -- Node + insert into gql.field(parent_type_id, type_id, name, is_not_null, is_array, is_array_not_null, local_columns, foreign_columns) + -- Node. 
+ select + node.id parent_type_id, + conn.id type_id, + case + -- owner_id -> owner + when ( + array_length(rel.local_columns, 1) = 1 + and rel.local_columns[1] like '%_id' + and rel.foreign_cardinality = 'ONE' + and gql.to_camel_case(left(rel.local_columns[1], -3)) not in (select name from gql.field where parent_type_id = node.id) + ) then gql.to_camel_case(left(rel.local_columns[1], -3)) + when ( + rel.foreign_cardinality = 'ONE' + and gql.to_camel_case(gql.to_table_name(rel.foreign_entity)) not in (select name from gql.field where parent_type_id = node.id) + ) then gql.to_camel_case(gql.to_table_name(rel.foreign_entity)) + when ( + rel.foreign_cardinality = 'MANY' + and gql.to_camel_case(gql.to_table_name(rel.foreign_entity)) not in (select name from gql.field where parent_type_id = node.id) + ) then gql.to_camel_case(gql.to_table_name(rel.foreign_entity)) || 's' + else gql.to_camel_case(gql.to_table_name(rel.foreign_entity)) || 'RequiresNameOverride' + end, + -- todo + false as is_not_null, + case + when rel.foreign_cardinality = 'MANY' then true + else false + end as is_array, + case + when rel.foreign_cardinality = 'MANY' then false + else null + end as is_array_not_null, + rel.local_columns, + rel.foreign_columns + from + gql.type node + join gql.relationship rel + on node.entity = rel.local_entity + join gql.type conn + on conn.entity = rel.foreign_entity + where + node.type_type = 'Node' + and conn.type_type = 'Connection' + order by + rel.local_entity, local_columns; end; $$; grant all on schema gql to postgres; grant all on all tables in schema gql to postgres; - diff --git a/test/conftest.py b/test/conftest.py index aa2dd963..ed3e0481 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -2,9 +2,8 @@ import json import os import subprocess -from pathlib import Path -from flupy import walk_files import time +from pathlib import Path import pytest import sqlalchemy @@ -72,7 +71,7 @@ def dockerize_database(): def engine(dockerize_database): eng = create_engine(f"postgresql://postgres:password@localhost:{PORT}/{DB_NAME}") - path = Path('test/setup.sql') + path = Path("test/setup.sql") contents = path.read_text() with eng.connect() as conn: conn.execute(text(contents)) diff --git a/test/setup.sql b/test/setup.sql index 6ca40b59..64aca594 100644 --- a/test/setup.sql +++ b/test/setup.sql @@ -1,21 +1,21 @@ create extension "uuid-ossp"; create table account( - id uuid not null default uuid_generate_v4() primary key, - email varchar(255) not null, - encrypted_password varchar(255) not null, - created_at timestamp not null, - updated_at timestamp not null + id uuid not null default uuid_generate_v4() primary key, + email varchar(255) not null, + encrypted_password varchar(255) not null, + created_at timestamp not null, + updated_at timestamp not null ); create table blog( - id uuid not null default uuid_generate_v4() primary key, + id uuid not null default uuid_generate_v4() primary key, owner_id uuid not null references account(id), - name varchar(255) not null, + name varchar(255) not null, description varchar(255), - created_at timestamp not null, - updated_at timestamp not null + created_at timestamp not null, + updated_at timestamp not null ); @@ -23,11 +23,11 @@ create type blog_post_status as enum ('PENDING', 'RELEASED'); create table blog_post( - id uuid not null default uuid_generate_v4() primary key, + id uuid not null default uuid_generate_v4() primary key, blog_id uuid not null references blog(id), - title varchar(255) not null, + title varchar(255) not null, body 
varchar(10000), status blog_post_status not null, - created_at timestamp not null, - updated_at timestamp not null + created_at timestamp not null, + updated_at timestamp not null ); diff --git a/test/test_component.py b/test/test_component.py index bd825cf4..8f1b2101 100644 --- a/test/test_component.py +++ b/test/test_component.py @@ -1,21 +1,20 @@ import json -from sqlalchemy import func, select, text +from sqlalchemy import func, select def test_get_name(sess): - selection = json.dumps({ - "kind": "Field", - "name": { - "kind": "Name", - "value": "hello" - }, - "alias": None, - "arguments": None, - "directives": None, - "selectionSet": None - }) + selection = json.dumps( + { + "kind": "Field", + "name": {"kind": "Name", "value": "hello"}, + "alias": None, + "arguments": None, + "directives": None, + "selectionSet": None, + } + ) (result,) = sess.execute(select([func.gql.get_name(selection)])).fetchone() @@ -24,20 +23,16 @@ def test_get_name(sess): def test_get_alias(sess): - selection = json.dumps({ - "kind": "Field", - "name": { - "kind": "Name", - "value": "hello" - }, - "alias": { - "kind": "Name", - "value": "hello_alias" - }, - "arguments": None, - "directives": None, - "selectionSet": None - }) + selection = json.dumps( + { + "kind": "Field", + "name": {"kind": "Name", "value": "hello"}, + "alias": {"kind": "Name", "value": "hello_alias"}, + "arguments": None, + "directives": None, + "selectionSet": None, + } + ) (result,) = sess.execute(select([func.gql.get_alias(selection)])).fetchone() @@ -46,17 +41,16 @@ def test_get_alias(sess): def test_get_alias_defaults_to_name(sess): - selection = json.dumps({ - "kind": "Field", - "name": { - "kind": "Name", - "value": "hello" - }, - "alias": None, - "arguments": None, - "directives": None, - "selectionSet": None - }) + selection = json.dumps( + { + "kind": "Field", + "name": {"kind": "Name", "value": "hello"}, + "alias": None, + "arguments": None, + "directives": None, + "selectionSet": None, + } + ) (result,) = sess.execute(select([func.gql.get_alias(selection)])).fetchone() diff --git a/test/test_execute.py b/test/test_execute.py index 5e227165..303c453e 100644 --- a/test/test_execute.py +++ b/test/test_execute.py @@ -1,7 +1,5 @@ -import json - -from sqlalchemy import func, select, text import pytest +from sqlalchemy import func, select, text def setup_data(sess) -> None: @@ -149,8 +147,3 @@ def test_execute_experimental(sess): (result,) = sess.execute(select([func.gql.execute(query)])).fetchone() assert result["data"] == {"xXx": {"book_id": 2}} - - - - - diff --git a/test/test_inspection.py b/test/test_inspection.py index 1005b8c2..c1f9f108 100644 --- a/test/test_inspection.py +++ b/test/test_inspection.py @@ -1,12 +1,6 @@ -import json - -from sqlalchemy import func, select, text -import pytest - - def test_execute_simple(sess): - import pdb; pdb.set_trace() - #pass - + import pdb + pdb.set_trace() + # pass diff --git a/test/test_parse.py b/test/test_parse.py index 3a490a0e..4bb536a7 100644 --- a/test/test_parse.py +++ b/test/test_parse.py @@ -1,6 +1,6 @@ import json -from sqlalchemy import func, select, text +from sqlalchemy import func, select def test_parse(sess):
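The naming and primary-key helpers defined in `sql/pg_graphql--0.1.sql` can be spot-checked against the tables created in `test/setup.sql`. A minimal sketch, assuming the extension is installed and that test schema has been loaded; the expected values in the comments are inferred from the function bodies in the diff above, not captured output:

```sql
-- Sketch: sanity checks for the schema-reflection helpers
select gql.to_pascal_case('blog_post');               -- expected: BlogPost
select gql.to_camel_case('created_at');               -- expected: createdAt
select gql.to_table_name('account'::regclass);        -- expected: account
select gql.to_pkey_column_names('account'::regclass); -- expected: {id}
```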