From 05afd4f839ab54d25d399ab70661a4358b0ce755 Mon Sep 17 00:00:00 2001
From: Dimitrie Stefanescu
Date: Wed, 13 May 2020 14:19:46 +0100
Subject: [PATCH] fix(tests & queries): added graphql handlers for object children, tests, and refactoring

Graphql:
---
 modules/core/graph/resolvers/objects.js    |  16 +-
 modules/core/graph/schemas/objects.graphql |  12 +-
 modules/core/migrations/000-core.js        |  16 +-
 modules/core/objects/services.js           |  68 ++++---
 modules/core/tests/graph.spec.js           | 174 +++++++++++++++++-
 modules/core/tests/objects.spec.js         |  20 +-
 modules/core/tests/sampleObjectData.js     |  22 +--
 test-queries/closure-two-stage.sql         |   2 +-
 test-queries/materialised-fullcount.sql    |   2 +-
 .../materialised-simple-ordinality.sql     |   2 +-
 test-queries/materialised-simple.sql       |   2 +-
 11 files changed, 263 insertions(+), 73 deletions(-)

diff --git a/modules/core/graph/resolvers/objects.js b/modules/core/graph/resolvers/objects.js
index 272567cbc2..6a64ac7292 100644
--- a/modules/core/graph/resolvers/objects.js
+++ b/modules/core/graph/resolvers/objects.js
@@ -3,7 +3,7 @@ const root = require( 'app-root-path' )
 const { AuthorizationError, ApolloError } = require( 'apollo-server-express' )
 const { validateScopes, authorizeResolver } = require( `${root}/modules/shared` )
 const { getUser } = require( '../../users/services' )
-const { createCommit, getCommitsByStreamId, createObject, createObjects, getObject, getObjects } = require( '../../objects/services' )
+const { createCommit, getCommitsByStreamId, createObject, createObjects, getObject, getObjects, getObjectChildren, getObjectChildrenQuery } = require( '../../objects/services' )
 const { createTag, updateTag, getTagById, deleteTagById, getTagsByStreamId, createBranch, updateBranch, getBranchById, getBranchCommits, deleteBranchById, getBranchesByStreamId, getStreamReferences } = require( '../../references/services' )

 module.exports = {
@@ -41,11 +41,19 @@ module.exports = {
       return await getUser( parent.author )
     },
     async children( parent, args, context, info ) {
-      console.log( parent.totalChildrenCount )
-      console.log( args )
+      // console.log( parent )
+      // console.log( args )
+      if ( !args.query && !args.orderBy ) {
+        // Simple query
+        let result = await getObjectChildren( { objectId: parent.id, limit: args.limit, depth: args.depth, select: args.select, cursor: args.cursor } )
+        return { totalCount: parent.totalChildrenCount, cursor: result.cursor, objects: result.objects }
+      }

-      throw new ApolloError( 'Not implemented' )
+      // Complex query
+      let result = await getObjectChildrenQuery( { objectId: parent.id, limit: args.limit, depth: args.depth, select: args.select, query: args.query, orderBy: args.orderBy, cursor: args.cursor } )
+      return result
+      // throw new ApolloError( 'Not implemented' )
     }
   },
   Tag: {
diff --git a/modules/core/graph/schemas/objects.graphql b/modules/core/graph/schemas/objects.graphql
index 1dbf17794b..440179c0e9 100644
--- a/modules/core/graph/schemas/objects.graphql
+++ b/modules/core/graph/schemas/objects.graphql
@@ -27,6 +27,7 @@ type Object {
   speckleType: String!
   applicationId: String
   createdAt: DateTime
+  totalChildrenCount: Int
   """
   The object's description. Valid only in the case of commit objects.
   """
@@ -49,7 +50,7 @@ type Object {
   data: JSON

   """
-  Any objects that this object references.
+  Get any objects that this object references. In the case of commits, this will give you a commit's constituent objects.
**NOTE**: Providing any of the two last arguments ( `query`, `orderBy` ) will trigger a different code branch that executes a much more expensive SQL query. It is not recommended to do so for basic clients that are interested in purely getting all the objects of a given commit. """ @@ -58,13 +59,8 @@ type Object { depth: Int! = 50, select: [String], cursor: String, - query: String, - orderBy: String ): ObjectCollection! - - """ - Query the object's childern, so you can receive only the ones you want to. - """ - childernQuery(limit: Int! = 100, cursor:String, select: [String], depth: Int! = 1 ): ObjectCollection! + query: [JSONObject!], + orderBy: JSONObject ): ObjectCollection! } type ObjectCollection { diff --git a/modules/core/migrations/000-core.js b/modules/core/migrations/000-core.js index 3f14a91db0..f7a95fca46 100644 --- a/modules/core/migrations/000-core.js +++ b/modules/core/migrations/000-core.js @@ -61,21 +61,26 @@ exports.up = async knex => { table.specificType( 'role', 'speckle_acl_role_type' ).defaultTo( 'write' ) } ) - // Objects Table + // Objects Table. + // First class citizen properties are: + // id - the object's hash + // totalChildrenCount - how many subchildren, regardless of depth, this object has + // data - the jsonb object + // author - commit specific field + // description - commit specific field + // createdAt - date of insertion await knex.schema.createTable( 'objects', table => { table.string( 'id' ).primary( ) - table.string( 'speckle_type' ).defaultTo( 'Base' ).notNullable( ) - table.string( 'applicationId' ) + table.string( 'speckleType' ).defaultTo( 'Base' ).notNullable( ) table.integer( 'totalChildrenCount' ) table.jsonb( 'totalChildrenCountByDepth' ) table.jsonb( 'data' ) table.string( 'author', 10 ).references( 'id' ).inTable( 'users' ) table.string( 'description' ) table.timestamp( 'createdAt' ).defaultTo( knex.fn.now( ) ) - table.index( [ 'speckle_type' ], 'type_index' ) } ) - // Tree inheritance tracker + // Tree inheritance tracker (materialised path) await knex.schema.createTable( 'object_tree_refs', table => { table.increments( 'id' ) table.string( 'parent' ).index( null, 'HASH' ) @@ -83,6 +88,7 @@ exports.up = async knex => { } ) await knex.raw( `CREATE INDEX tree_path_idx ON object_tree_refs USING gist(path)` ) + // Closure table for tracking the relationships we care about await knex.schema.createTable( 'object_children_closure', table => { table.string( 'parent' ).notNullable( ).index() table.string( 'child' ).notNullable( ).index() diff --git a/modules/core/objects/services.js b/modules/core/objects/services.js index db3e695e74..0956fefe12 100644 --- a/modules/core/objects/services.js +++ b/modules/core/objects/services.js @@ -24,7 +24,7 @@ module.exports = { */ async createCommit( streamId, userId, object ) { - object.speckle_type = 'commit' + object.speckleType = 'commit' object.author = userId let id = await module.exports.createObject( object ) @@ -48,15 +48,24 @@ module.exports = { let insertionObject = prepInsertionObject( object ) let closures = [ ] + let totalChildrenCountByDepth = {} if ( object.__closure !== null ) { for ( const prop in object.__closure ) { closures.push( { parent: insertionObject.id, child: prop, minDepth: object.__closure[ prop ] } ) + + if ( totalChildrenCountByDepth[ object.__closure[ prop ].toString( ) ] ) + totalChildrenCountByDepth[ object.__closure[ prop ].toString( ) ]++ + else + totalChildrenCountByDepth[ object.__closure[ prop ].toString( ) ] = 1 } } delete insertionObject.__tree delete 
insertionObject.__closure + insertionObject.totalChildrenCount = closures.length + insertionObject.totalChildrenCountByDepth = JSON.stringify( totalChildrenCountByDepth ) + let q1 = Objects( ).insert( insertionObject ).toString( ) + ' on conflict do nothing' await knex.raw( q1 ) @@ -148,19 +157,22 @@ module.exports = { let fullObjectSelect = false let selectStatements = [ ] - if ( select && select.length > 0 ) { - selectStatements.push( `jsonb_path_query(data, '$.id') as id` ) - select.forEach( f => { - selectStatements += `, jsonb_path_query(data, '$.${ f }') as "${f}"` + let q = Closures( ) + q.select( 'id' ) + q.select( 'createdAt' ) + q.select( 'speckleType' ) + q.select( 'totalChildrenCount' ) + + if ( Array.isArray( select ) ) { + select.forEach( ( field, index ) => { + q.select( knex.raw( 'jsonb_path_query(data, :path) as :name:', { path: "$." + field, name: '' + index } ) ) } ) } else { - selectStatements.push( '"data"' ) fullObjectSelect = true + q.select( 'data' ) } - let q = Closures( ) - .select( knex.raw( selectStatements ) ) - .rightJoin( 'objects', 'objects.id', 'object_children_closure.child' ) + q.rightJoin( 'objects', 'objects.id', 'object_children_closure.child' ) .where( knex.raw( 'parent = ?', [ objectId ] ) ) .andWhere( knex.raw( '"minDepth" < ?', [ depth ] ) ) .andWhere( knex.raw( 'id > ?', [ cursor ? cursor : '0' ] ) ) @@ -169,15 +181,18 @@ module.exports = { let rows = await q - if ( fullObjectSelect ) rows.forEach( ( o, i, arr ) => arr[ i ] = { ...o.data } ) - else rows.forEach( ( o, i, arr ) => { - let no = {} - for ( let key in o ) set( no, key, o[ key ] ) - arr[ i ] = no - } ) + if ( !fullObjectSelect ) + rows.forEach( ( o, i, arr ) => { + let no = { id: o.id, createdAt: o.createdAt, speckleType: o.speckleType, totalChildrenCount: o.totalChildrenCount, data: {} } + let k = 0 + for ( let field of select ) { + set( no.data, field, o[ k++ ] ) + } + arr[ i ] = no + } ) let lastId = rows[ rows.length - 1 ].id - return { rows, cursor: lastId } + return { objects: rows, cursor: lastId } }, // This query is inefficient on larger sets (n * 10k objects) as we need to return the total count on an arbitrarily (user) defined selection of objects. @@ -199,8 +214,8 @@ module.exports = { if ( orderBy && select.indexOf( orderBy.field ) === -1 ) { select.push( orderBy.field ) } - // always add the id! - if ( select.indexOf( 'id' ) === -1 ) select.unshift( 'id' ) + // // always add the id! + // if ( select.indexOf( 'id' ) === -1 ) select.unshift( 'id' ) } else { fullObjectSelect = true } @@ -208,10 +223,13 @@ module.exports = { let additionalIdOrderBy = orderBy.field !== 'id' let operatorsWhitelist = [ '=', '>', '>=', '<', '<=', '!=' ] - + let mainQuery = knex.with( 'objs', cteInnerQuery => { // always select the id cteInnerQuery.select( 'id' ).from( 'object_children_closure' ) + cteInnerQuery.select( 'createdAt' ) + cteInnerQuery.select( 'speckleType' ) + cteInnerQuery.select( 'totalChildrenCount' ) // if there are any select fields, add them if ( Array.isArray( select ) ) { @@ -303,9 +321,7 @@ module.exports = { mainQuery.limit( limit ) - // console.log( mainQuery.toString( ) ) - // console.log( '-----' ) - + // Finally, execute the query let rows = await mainQuery let totalCount = rows && rows.length > 0 ? parseInt( rows[ 0 ].total_count ) : 0 @@ -318,15 +334,16 @@ module.exports = { // OR reconstruct the object based on the provided select paths. 
else { rows.forEach( ( o, i, arr ) => { - let no = {} + let no = { id: o.id, createdAt: o.createdAt, speckleType: o.speckleType, totalChildrenCount: o.totalChildrenCount, data: {} } let k = 0 for ( let field of select ) { - set( no, field, o[ k++ ] ) + set( no.data, field, o[ k++ ] ) } arr[ i ] = no } ) } + // Assemble the cursor for an eventual next call cursor = cursor || {} let cursorObj = { field: cursor.field || orderBy.field, @@ -370,8 +387,7 @@ function prepInsertionObject( obj ) { return { data: stringifiedObj, // stored in jsonb column id: obj.id, - applicationId: obj.applicationId, - speckle_type: obj.speckle_type, + speckleType: obj.speckleType, description: obj.description, author: obj.author } diff --git a/modules/core/tests/graph.spec.js b/modules/core/tests/graph.spec.js index 04821fce6a..5471bb1d92 100644 --- a/modules/core/tests/graph.spec.js +++ b/modules/core/tests/graph.spec.js @@ -12,6 +12,7 @@ chai.use( chaiHttp ) const knex = require( `${root}/db/knex` ) const { createUser, createToken } = require( '../users/services' ) +const { createObject, createObjects } = require( '../objects/services' ) let addr @@ -22,6 +23,7 @@ describe( 'GraphQL API Core', ( ) => { // set up app & two basic users to ping pong permissions around before( async ( ) => { + await knex.migrate.rollback( ) await knex.migrate.latest( ) let { app } = await init( ) let { server } = await startHttp( app ) @@ -36,7 +38,7 @@ describe( 'GraphQL API Core', ( ) => { } ) after( async ( ) => { - await knex.migrate.rollback( ) + // await knex.migrate.rollback( ) testServer.close( ) } ) @@ -78,7 +80,7 @@ describe( 'GraphQL API Core', ( ) => { expect( res1.body.data.apiTokenCreate ).to.be.a( 'string' ) token1 = `Bearer ${res1.body.data.apiTokenCreate}` - + console.log( userA.token ) const res2 = await sendRequest( userA.token, { query: `mutation { apiTokenCreate(name:"Token 1", scopes: ["streams:write", "streams:read", "users:email"]) }` } ) token2 = `Bearer ${res2.body.data.apiTokenCreate}` @@ -432,6 +434,7 @@ describe( 'GraphQL API Core', ( ) => { expect( res.body.data.stream.branch.commits.totalCount ).to.equal( 2 ) } ) + it( 'should retrieve a stream tag', async ( ) => { const res = await sendRequest( userA.token, { query: `query { stream(id:"${ts1}") { tag(id:"${retrievedStream.tags.tags[0].id}") { name description commit { id description author { id name } } } } } ` } ) @@ -444,6 +447,7 @@ describe( 'GraphQL API Core', ( ) => { expect( res.body.data.stream.tag.commit.author.name ).to.equal( 'MiticÄ' ) } ) + it( 'should retrieve a stream commit', async ( ) => { const res = await sendRequest( userA.token, { query: `query { stream(id:"${ts1}") { commit(id:"${c2.id}") { id description data } } }` } ) expect( res ).to.be.json @@ -451,8 +455,128 @@ describe( 'GraphQL API Core', ( ) => { expect( res.body.data.stream.commit.description ).to.equal( 'test second commit' ) } ) - it( 'should retrieve commit/object children', async ( ) => { - assert.fail( 'not implemented yet' ) + + describe( 'Objects', ( ) => { + let myCommit + let myObjs + + before( async ( ) => { + let { commit, objs } = createManyObjects( 100, 'noise__' ) + myCommit = commit + myObjs = objs + } ) + + it( 'should create a commit with its object children', async ( ) => { + + const commitRes = await sendRequest( userA.token, { query: `mutation($commit:JSONObject!) 
{ commitCreate(streamId:"${ts1}", commit:$commit) }`, variables: { commit: myCommit } } ) + + let commitId = commitRes.body.data.commitCreate + + expect( commitId ).to.be.a( 'string' ) + + const objsRes = await sendRequest( userA.token, { query: `mutation($objs:[JSONObject]!) { objectCreate(streamId:"${ts1}", objects: $objs) }`, variables: { objs: myObjs } } ) + + let objIds = objsRes.body.data.objectCreate + + expect( objIds.length ).to.equal( 100 ) + + } ) + + it( 'should get a commits objects', async ( ) => { + let first = await sendRequest( userA.token, { + query: ` + query { + stream(id:"${ts1}") { + id + name + commit( id:"${myCommit.id}" ) { + createdAt + author{ name } + children( limit: 2 ) { + totalCount + cursor + objects { + id + } + } + } + } + } + ` + } ) + + + expect( first ).to.be.json + expect( first.body.errors ).to.not.exist + expect( first.body.data.stream ).to.be.an( 'object' ) + expect( first.body.data.stream.commit ).to.be.an( 'object' ) + expect( first.body.data.stream.commit.children.objects.length ).to.equal( 2 ) + + let second = await sendRequest( userA.token, { + query: ` + query { + stream(id:"${ts1}") { + id + name + commit( id:"${myCommit.id}" ) { + createdAt + author{ name } + children( limit: 20, cursor: "${first.body.data.stream.commit.children.cursor}", select: ["sortValueA", "nest.arr[2]"] ) { + totalCount + objects { + id + data + } + } + } + } + } + ` + } ) + + expect( second ).to.be.json + expect( second.body.errors ).to.not.exist + expect( second.body.data.stream ).to.be.an( 'object' ) + expect( second.body.data.stream.commit ).to.be.an( 'object' ) + expect( second.body.data.stream.commit.children.objects.length ).to.equal( 20 ) + expect( second.body.data.stream.commit.children.objects[ 0 ].data.sortValueA ).to.equal( 52 ) // when sorting by id, it's always 52 + expect( second.body.data.stream.commit.children.objects[ 0 ].data.nest.arr[ 2 ] ).to.equal( 52 ) // when sorting by id, it's always 52 + } ) + + it( 'should query a commits objects', async ( ) => { + let first = await sendRequest( userA.token, { + query: ` + query( $query: [JSONObject!], $orderBy: JSONObject ) { + stream(id:"${ts1}") { + id + name + commit( id:"${myCommit.id}" ) { + createdAt + author{ name } + children( limit: 20, select:[ "sortValueA" ], query: $query, orderBy: $orderBy ) { + totalCount + cursor + objects { + id + data + } + } + } + } + } + `, + variables: { query: [ { field: 'sortValueA', operator: '>=', value: 42 } ], orderBy: { field: 'sortValueA' } } + } ) + + expect( first ).to.be.json + expect( first.body.errors ).to.not.exist + expect( first.body.data.stream ).to.be.an( 'object' ) + expect( first.body.data.stream.commit ).to.be.an( 'object' ) + expect( first.body.data.stream.commit.children.objects.length ).to.equal( 20 ) + expect( first.body.data.stream.commit.children.objects[0].data.sortValueA).to.equal( 42 ) + expect( first.body.data.stream.commit.children.objects[1].data.sortValueA).to.equal( 43 ) + } ) + } ) } ) @@ -466,4 +590,46 @@ describe( 'GraphQL API Core', ( ) => { */ function sendRequest( auth, obj ) { return chai.request( addr ).post( '/graphql' ).set( 'Authorization', auth ).send( obj ) +} + +// const crypto = require( 'crypto' ) + +function createManyObjects( shitTon, noise ) { + shitTon = shitTon || 10000 + noise = noise || Math.random( ) * 100 + + let objs = [ ] + + let base = { name: 'base bastard 2', noise: noise, __closure: {} } + // objs.push( base ) + let k = 0 + + for ( let i = 0; i < shitTon; i++ ) { + let baby = { + name: `mr. 
${i}`, + nest: { duck: i % 2 === 0, mallard: 'falsey', arr: [ i + 42, i, i ] }, + test: { value: i, secondValue: 'mallard ' + i % 10 }, + similar: k, + even: i % 2 === 0, + objArr: [ { a: i }, { b: i * i }, { c: true } ], + noise: noise, + sortValueA: i, + sortValueB: i * 0.42 * i + } + if ( i % 3 === 0 ) k++ + getAFuckingId( baby ) + base.__closure[ baby.id ] = 1 + + if ( i > 1000 ) + base.__closure[ baby.id ] = i / 1000 + + objs.push( baby ) + } + + getAFuckingId( base ) + return { commit: base, objs: objs } +} + +function getAFuckingId( obj ) { + obj.id = obj.id || crypto.createHash( 'md5' ).update( JSON.stringify( obj ) ).digest( 'hex' ) } \ No newline at end of file diff --git a/modules/core/tests/objects.spec.js b/modules/core/tests/objects.spec.js index 310cfab648..e989c01542 100644 --- a/modules/core/tests/objects.spec.js +++ b/modules/core/tests/objects.spec.js @@ -19,7 +19,7 @@ const sampleObjects = require( './sampleObjectData' ) let sampleCommit = JSON.parse( `{ "Objects": [ { - "speckle_type": "reference", + "speckleType": "reference", "referencedId": "8a9b0676b7fe3e5e487bb34549e67f67" } ], @@ -29,7 +29,7 @@ let sampleCommit = JSON.parse( `{ ], "CreatedOn": "2020-03-18T12:06:07.82307Z", "id": "79eb41764cc2c065de752bd704bfc4aa", - "speckle_type": "Speckle.Core.Commit", + "speckleType": "Speckle.Core.Commit", "__tree": [ "79eb41764cc2c065de752bd704bfc4aa.8a9b0676b7fe3e5e487bb34549e67f67" ] @@ -39,7 +39,7 @@ let sampleObject = JSON.parse( `{ "Vertices": [], "id": "8a9b0676b7fe3e5e487bb34549e67f67", "applicationId": "test", - "speckle_type": "Tests.Polyline" + "speckleType": "Tests.Polyline" }` ) describe( 'Objects', ( ) => { @@ -377,7 +377,7 @@ describe( 'Objects', ( ) => { } ) it( 'should just order results by something', async ( ) => { - + let test = await getObjectChildrenQuery( { objectId: parentObjectId, limit: 2, @@ -391,7 +391,7 @@ describe( 'Objects', ( ) => { cursor: test.cursor } ) - expect( test.objects[1].test.value ).to.equal( test2.objects[0].test.value + 1 ) // continuity check + expect( test.objects[ 1 ].test.value ).to.equal( test2.objects[ 0 ].test.value + 1 ) // continuity check let test3 = await getObjectChildrenQuery( { objectId: parentObjectId, @@ -406,11 +406,9 @@ describe( 'Objects', ( ) => { cursor: test3.cursor } ) - expect( test3.objects[49].nest.duck ).to.equal( true ) - expect( test4.objects[0].nest.duck ).to.equal( false ) + expect( test3.objects[ 49 ].nest.duck ).to.equal( true ) + expect( test4.objects[ 0 ].nest.duck ).to.equal( false ) - console.log( test3.objects[49] ) - console.log( test4.objects[0] ) } ) } ) @@ -453,8 +451,8 @@ describe( 'Objects', ( ) => { expect( commits ).to.have.status( 200 ) expect( commits.body ).to.have.lengthOf( 2 ) expect( commits.body[ 0 ] ).to.have.property( 'id' ) - expect( commits.body[ 0 ] ).to.have.property( 'speckle_type' ) - expect( commits.body[ 0 ].speckle_type ).to.equal( 'commit' ) + expect( commits.body[ 0 ] ).to.have.property( 'speckleType' ) + expect( commits.body[ 0 ].speckleType ).to.equal( 'commit' ) } ) let objs = [ ] diff --git a/modules/core/tests/sampleObjectData.js b/modules/core/tests/sampleObjectData.js index f028693452..5e28c2661d 100644 --- a/modules/core/tests/sampleObjectData.js +++ b/modules/core/tests/sampleObjectData.js @@ -2,15 +2,15 @@ module.exports = JSON.parse( `[ { "name": "depth five", "id": "99b77f596443d2a2cf59124dbc6a4a8f", - "speckle_type": "" + "speckleType": "" }, { "name": "depth four", "@detach": { - "speckle_type": "reference", + "speckleType": "reference", 
"referencedId": "99b77f596443d2a2cf59124dbc6a4a8f" }, "id": "c2cdd8d01c219703926f7282db700e14", - "speckle_type": "", + "speckleType": "", "__tree": [ "c2cdd8d01c219703926f7282db700e14.99b77f596443d2a2cf59124dbc6a4a8f" ], @@ -20,11 +20,11 @@ module.exports = JSON.parse( `[ }, { "name": "depth three", "@detach": { - "speckle_type": "reference", + "speckleType": "reference", "referencedId": "c2cdd8d01c219703926f7282db700e14" }, "id": "78af2314eed937c7338fccc4224393c0", - "speckle_type": "", + "speckleType": "", "__tree": [ "78af2314eed937c7338fccc4224393c0.c2cdd8d01c219703926f7282db700e14", "78af2314eed937c7338fccc4224393c0.c2cdd8d01c219703926f7282db700e14.99b77f596443d2a2cf59124dbc6a4a8f" @@ -36,17 +36,17 @@ module.exports = JSON.parse( `[ }, { "name": "depth two", "@detach": { - "speckle_type": "reference", + "speckleType": "reference", "referencedId": "78af2314eed937c7338fccc4224393c0" }, "@joker": [ { - "speckle_type": "reference", + "speckleType": "reference", "referencedId": "99b77f596443d2a2cf59124dbc6a4a8f" } ], "id": "254cb2d7094eca3c809cdac2ffc4010b", - "speckle_type": "", + "speckleType": "", "__tree": [ "254cb2d7094eca3c809cdac2ffc4010b.78af2314eed937c7338fccc4224393c0", "254cb2d7094eca3c809cdac2ffc4010b.78af2314eed937c7338fccc4224393c0.c2cdd8d01c219703926f7282db700e14", @@ -61,15 +61,15 @@ module.exports = JSON.parse( `[ }, { "name": "depth one", "@detach": { - "speckle_type": "reference", + "speckleType": "reference", "referencedId": "254cb2d7094eca3c809cdac2ffc4010b" }, "@joker": { - "speckle_type": "reference", + "speckleType": "reference", "referencedId": "99b77f596443d2a2cf59124dbc6a4a8f" }, "id": "d2b9e647cb345673ff2b35ccab45ffc2", - "speckle_type": "", + "speckleType": "", "__tree": [ "d2b9e647cb345673ff2b35ccab45ffc2.254cb2d7094eca3c809cdac2ffc4010b", "d2b9e647cb345673ff2b35ccab45ffc2.254cb2d7094eca3c809cdac2ffc4010b.78af2314eed937c7338fccc4224393c0", diff --git a/test-queries/closure-two-stage.sql b/test-queries/closure-two-stage.sql index aa9c35a7ec..bdf6f3c932 100644 --- a/test-queries/closure-two-stage.sql +++ b/test-queries/closure-two-stage.sql @@ -6,7 +6,7 @@ WITH ids AS ( objs AS ( SELECT id, - speckle_type, + speckleType, "data" FROM ids JOIN objects ON ids.child = objects.id diff --git a/test-queries/materialised-fullcount.sql b/test-queries/materialised-fullcount.sql index 9c9b58dfb3..58caf369b0 100644 --- a/test-queries/materialised-fullcount.sql +++ b/test-queries/materialised-fullcount.sql @@ -4,7 +4,7 @@ WITH ids AS ( WHERE parent = '0_hash' ), objs AS ( - SELECT obj_id as id, speckle_type, "data" + SELECT obj_id as id, speckleType, "data" FROM ids JOIN objects ON ids.obj_id = objects.id -- WHERE objects."data" @> '{"text": "This is object 1"}' diff --git a/test-queries/materialised-simple-ordinality.sql b/test-queries/materialised-simple-ordinality.sql index d0b818ede3..fccd9675b5 100644 --- a/test-queries/materialised-simple-ordinality.sql +++ b/test-queries/materialised-simple-ordinality.sql @@ -4,7 +4,7 @@ WHERE path ~ '0_hash.*{2}' ORDER BY id ) - SELECT id, speckle_type, "data" -> 'nest' -> 'orderMe' + SELECT id, speckleType, "data" -> 'nest' -> 'orderMe' FROM ids JOIN objects ON obj_id = objects.id WITH ORDINALITY OFFSET 2 diff --git a/test-queries/materialised-simple.sql b/test-queries/materialised-simple.sql index 849af2100f..982c0315f6 100644 --- a/test-queries/materialised-simple.sql +++ b/test-queries/materialised-simple.sql @@ -3,7 +3,7 @@ WITH ids AS( FROM object_tree_refs WHERE parent = '0_hash' ) -SELECT obj_id, speckle_type, "data" +SELECT 
obj_id, speckleType, "data"
 FROM ids
 JOIN objects ON ids.obj_id = objects.id
 OFFSET 0
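
Usage sketch for the new children( query:, orderBy: ) arguments introduced above, mirroring the 'should query a commits objects' test in graph.spec.js. The stream and commit ids below are placeholders, sortValueA is a field of the generated test objects, and — per the operator whitelist in getObjectChildrenQuery — the operator must be one of '=', '>', '>=', '<', '<=', '!='.

  query( $query: [JSONObject!], $orderBy: JSONObject ) {
    stream( id: "<streamId>" ) {
      commit( id: "<commitId>" ) {
        children( limit: 20, select: [ "sortValueA" ], query: $query, orderBy: $orderBy ) {
          totalCount
          cursor
          objects { id data }
        }
      }
    }
  }

  variables:
  {
    "query": [ { "field": "sortValueA", "operator": ">=", "value": 42 } ],
    "orderBy": { "field": "sortValueA" }
  }

Passing either query or orderBy routes the resolver to getObjectChildrenQuery (the more expensive SQL path noted in the schema description); omitting both keeps it on the simple getObjectChildren branch.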