-
Notifications
You must be signed in to change notification settings - Fork 23
Feature/sdk reporting #316
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
7546215
7eb90c4
edee67a
fea7be8
0f603cb
ef6cfd1
65c2f5b
777d426
f6370a6
79c9a5d
4685dd3
dd29d5f
cbdcaaf
fd6b080
2846a61
93bf85a
1e12209
499d98b
44209ff
063e446
e3e7aab
5359d09
ce5092b
5ff2da5
0657ffe
388ab2f
af06f64
50eb198
119f5c4
ec9ce6f
8a017f8
63b6f67
d5429c7
83d4280
8f74f27
db58d24
f1d187b
5280565
8cbbd35
a700ec9
72398c2
842e621
12ac832
6dfe510
f3c356a
617fc06
bb9dfd7
9a4094e
644de71
7d93f69
efd5600
64e397f
87e98c9
2fd9181
1ee1d39
fda348c
cb46003
b21770f
06a1859
d20451d
453ec72
ae49da8
b123e43
eb7c4c2
46dd4af
d56753e
5f59578
bb742da
28efea4
a4645b9
bc18198
6e1c1ef
83972c6
fcc0468
486a094
58fdb79
70f1408
5ded122
668b0d8
a2cf616
2e6cb41
c808e97
e7e1457
9ccf251
0b1fadd
4ea15e6
3735ab4
c9088a4
e2f7a7e
28914bf
51bbcc4
4c4144e
db23db6
f67746d
9797bad
73d8829
4682a74
f46bede
8d194f1
29b4b59
a741f41
75c6692
70eac9c
732e1d2
8899f9e
42ddacf
85d6e6b
052ca63
d4118ac
2c1eb1b
3f8cec5
f824035
27eb8f4
02a5036
c8c38df
d039559
aefa0f9
e096409
7526ab2
f1244fd
f88544f
6b5b58b
792b299
caf0c26
b2d5e9e
e538cdf
f385a50
8bed767
551edf2
cabd182
79043fa
45d17df
c7cd9b1
2b9695c
2d34818
2ff73da
0c3a853
8277c8e
4a930e9
ac40bca
5020173
c2b84d2
fc82f14
a9d453f
7baf97b
65120ce
da94778
de57ca0
05006be
794a099
9b7fde8
1af5e3a
30d0efd
10a0164
51661d8
824ed2f
92ddb08
1a6ad40
0fad466
4ac4ed3
877ce66
37825aa
29a0845
ea0c212
c1b6a0c
5a9c5ce
4f0c77d
8167d34
cbfa0cb
fa5b872
a2ad23e
a15ee2d
ed65508
9e9e91b
f986049
fe2ef90
7a6673e
4a0c4ca
d5f2ae6
acd4bea
084287c
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,7 @@ | ||
--- | ||
'@powersync/service-module-mongodb-storage': patch | ||
'@powersync/service-core': patch | ||
'@powersync/service-types': patch | ||
--- | ||
|
||
Added SDK reporting. |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
--- | ||
'@powersync/service-module-postgres-storage': patch | ||
'@powersync/service-module-mongodb-storage': patch | ||
'@powersync/service-core': patch | ||
'@powersync/service-types': patch | ||
--- | ||
|
||
Added sdk reporting to storage |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,13 @@ | ||
--- | ||
'@powersync/service-module-postgres-storage': patch | ||
'@powersync/service-module-mongodb-storage': patch | ||
'@powersync/service-module-postgres': patch | ||
'@powersync/service-module-mongodb': patch | ||
'@powersync/service-core': patch | ||
'@powersync/service-module-mysql': patch | ||
'@powersync/service-module-core': patch | ||
'@powersync/lib-services-framework': patch | ||
'@powersync/service-types': patch | ||
--- | ||
|
||
Added the MongoDB reporting storage to the storage engine. |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,58 @@ | ||
import { migrations } from '@powersync/service-core'; | ||
import * as storage from '../../../storage/storage-index.js'; | ||
import { MongoStorageConfig } from '../../../types/types.js'; | ||
|
||
export const up: migrations.PowerSyncMigrationFunction = async (context) => { | ||
const { | ||
service_context: { configuration } | ||
} = context; | ||
const db = storage.createPowerSyncMongo(configuration.storage as MongoStorageConfig); | ||
|
||
try { | ||
await db.createConnectionReportingCollection(); | ||
|
||
await db.connection_report_events.createIndex( | ||
{ | ||
connected_at: 1, | ||
jwt_exp: 1, | ||
disconnected_at: 1 | ||
}, | ||
{ name: 'connection_list_index' } | ||
); | ||
|
||
await db.connection_report_events.createIndex( | ||
{ | ||
user_id: 1 | ||
}, | ||
{ name: 'connection_user_id_index' } | ||
); | ||
await db.connection_report_events.createIndex( | ||
{ | ||
client_id: 1 | ||
}, | ||
{ name: 'connection_client_id_index' } | ||
); | ||
await db.connection_report_events.createIndex( | ||
{ | ||
sdk: 1 | ||
}, | ||
{ name: 'connection_index' } | ||
); | ||
} finally { | ||
await db.client.close(); | ||
} | ||
}; | ||
|
||
export const down: migrations.PowerSyncMigrationFunction = async (context) => { | ||
const { | ||
service_context: { configuration } | ||
} = context; | ||
|
||
const db = storage.createPowerSyncMongo(configuration.storage as MongoStorageConfig); | ||
|
||
try { | ||
await db.db.dropCollection('connection_report_events'); | ||
} finally { | ||
await db.client.close(); | ||
} | ||
}; |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,197 @@ | ||
import { mongo } from '@powersync/lib-service-mongodb'; | ||
import { storage } from '@powersync/service-core'; | ||
import { event_types } from '@powersync/service-types'; | ||
import { PowerSyncMongo } from './implementation/db.js'; | ||
import { logger } from '@powersync/lib-services-framework'; | ||
|
||
export class MongoReportStorage implements storage.ReportStorage { | ||
private readonly client: mongo.MongoClient; | ||
public readonly db: PowerSyncMongo; | ||
|
||
constructor(db: PowerSyncMongo) { | ||
this.client = db.client; | ||
this.db = db; | ||
} | ||
async deleteOldConnectionData(data: event_types.DeleteOldConnectionData): Promise<void> { | ||
const { date } = data; | ||
const result = await this.db.connection_report_events.deleteMany({ | ||
connected_at: { $lt: date }, | ||
$or: [ | ||
{ disconnected_at: { $exists: true } }, | ||
{ jwt_exp: { $lt: new Date() }, disconnected_at: { $exists: false } } | ||
] | ||
}); | ||
if (result.deletedCount > 0) { | ||
logger.info( | ||
`TTL from ${date.toISOString()}: ${result.deletedCount} MongoDB documents have been removed from connection_report_events.` | ||
); | ||
} | ||
} | ||
|
||
async getClientConnectionReports( | ||
data: event_types.ClientConnectionReportRequest | ||
): Promise<event_types.ClientConnectionReportResponse> { | ||
const { start, end } = data; | ||
const result = await this.db.connection_report_events | ||
.aggregate<event_types.ClientConnectionReportResponse>([ | ||
{ | ||
$match: { | ||
connected_at: { $lte: end, $gte: start } | ||
} | ||
}, | ||
this.connectionsFacetPipeline(), | ||
this.connectionsProjectPipeline() | ||
]) | ||
.toArray(); | ||
return result[0]; | ||
} | ||
|
||
async reportClientConnection(data: event_types.ClientConnectionBucketData): Promise<void> { | ||
const updateFilter = this.updateDocFilter(data.user_id, data.client_id!); | ||
await this.db.connection_report_events.findOneAndUpdate( | ||
updateFilter, | ||
{ | ||
$set: data, | ||
$unset: { | ||
disconnected_at: '' | ||
} | ||
}, | ||
{ | ||
upsert: true | ||
} | ||
); | ||
} | ||
async reportClientDisconnection(data: event_types.ClientDisconnectionEventData): Promise<void> { | ||
const { connected_at, user_id, client_id } = data; | ||
await this.db.connection_report_events.findOneAndUpdate( | ||
{ | ||
client_id, | ||
user_id, | ||
connected_at | ||
}, | ||
{ | ||
$set: { | ||
disconnected_at: data.disconnected_at | ||
}, | ||
$unset: { | ||
jwt_exp: '' | ||
} | ||
} | ||
); | ||
} | ||
async getConnectedClients( | ||
data: event_types.ClientConnectionsRequest | ||
): Promise<event_types.ClientConnectionReportResponse> { | ||
const timeframeFilter = this.listConnectionsDateRange(data); | ||
const result = await this.db.connection_report_events | ||
.aggregate<event_types.ClientConnectionReportResponse>([ | ||
{ | ||
$match: { | ||
disconnected_at: { $exists: false }, | ||
jwt_exp: { $gt: new Date() }, | ||
...timeframeFilter | ||
Comment on lines
+90
to
+92
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Same as commented on Postgres storage - we should clarify whether this is expected to only be for current connections, or whether the timeframe can be used to fetch connections in the past. |
||
} | ||
}, | ||
this.connectionsFacetPipeline(), | ||
this.connectionsProjectPipeline() | ||
]) | ||
.toArray(); | ||
return result[0]; | ||
} | ||
|
||
async [Symbol.asyncDispose]() { | ||
// No-op | ||
} | ||
|
||
EspressoTrip-v2 marked this conversation as resolved.
Show resolved
Hide resolved
|
||
private parseJsDate(date: Date) { | ||
const year = date.getFullYear(); | ||
const month = date.getMonth(); | ||
const today = date.getDate(); | ||
const day = date.getDay(); | ||
return { | ||
year, | ||
month, | ||
today, | ||
day, | ||
parsedDate: date | ||
}; | ||
} | ||
|
||
private connectionsFacetPipeline() { | ||
return { | ||
$facet: { | ||
unique_users: [ | ||
{ | ||
$group: { | ||
_id: '$user_id' | ||
} | ||
}, | ||
{ | ||
$count: 'count' | ||
} | ||
], | ||
sdk_versions_array: [ | ||
{ | ||
$group: { | ||
_id: '$sdk', | ||
total: { $sum: 1 }, | ||
client_ids: { $addToSet: '$client_id' }, | ||
user_ids: { $addToSet: '$user_id' } | ||
} | ||
}, | ||
{ | ||
$project: { | ||
_id: 0, | ||
sdk: '$_id', | ||
users: { $size: '$user_ids' }, | ||
clients: { $size: '$client_ids' } | ||
} | ||
}, | ||
{ | ||
$sort: { | ||
sdk: 1 | ||
} | ||
} | ||
] | ||
} | ||
}; | ||
} | ||
|
||
private connectionsProjectPipeline() { | ||
return { | ||
$project: { | ||
users: { $ifNull: [{ $arrayElemAt: ['$unique_users.count', 0] }, 0] }, | ||
sdks: '$sdk_versions_array' | ||
} | ||
}; | ||
} | ||
|
||
private updateDocFilter(userId: string, clientId: string) { | ||
const { year, month, today } = this.parseJsDate(new Date()); | ||
const nextDay = today + 1; | ||
return { | ||
user_id: userId, | ||
client_id: clientId, | ||
connected_at: { | ||
// Need to create a new date here to sett the time to 00:00:00 | ||
$gte: new Date(year, month, today), | ||
$lt: new Date(year, month, nextDay) | ||
} | ||
Comment on lines
+177
to
+179
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. This depends on the timezone of the server - not sure if that is what we want? But I'm also wondering - do we specifically want this behavior ("if the connection event is within the same day the stored initial connection is updated")? Why not store these as separate connection events, and then handle any other logic in the aggregations? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Dylan was concerned about storing the connections as logs in the ps service db, due to volume. Considering we are using per day, per week and per month, we only need day granularity. When a user refreshes the web sdk it actually causes a disconnect and a reconnect which would make multiple copies withing very a short period if they refresh a few times. So to reduce the redundant connections by the same user I did it this way. Which still gives us the minimal per day granularity. The date objects get converted to UTC time by Mongo. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Ok, we can keep it as daily connections for now - it looks like we can change that later if needed without too much effort.
If we do keep daily values, we need to be very explicit about time zones. As is, if I run the server in UTC+2, the filter here would use Now in our hosted environment at least, it is likely that the service runs in the UTC timezone already, and this won't make a difference. But if we're relying on that, it is better to be explicit - use UTC timezones here ( |
||
}; | ||
} | ||
|
||
private listConnectionsDateRange(data: event_types.ClientConnectionsRequest) { | ||
const { range } = data; | ||
if (!range) { | ||
return undefined; | ||
} | ||
const endDate = data.range?.end ? new Date(data.range.end) : new Date(); | ||
const startDate = new Date(range.start); | ||
return { | ||
connected_at: { | ||
$lte: endDate, | ||
$gte: startDate | ||
} | ||
}; | ||
} | ||
} |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Same as for postgres - we should use connection ids here, rather than assume this combination is unique.