Skip to content

Commit

Permalink
Merge pull request #219 from GNS-Science/main
Browse files Browse the repository at this point in the history
resync with main
  • Loading branch information
chrisbc authored Jun 20, 2024
2 parents fbeee82 + e13a619 commit d3d4157
Show file tree
Hide file tree
Showing 10 changed files with 158 additions and 83 deletions.
17 changes: 14 additions & 3 deletions graphql_api/data/base_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,15 @@
BaseData is the base class for AWS_S3 data handlers
"""

import enum
import json
import logging
import random
from collections import namedtuple
from datetime import datetime as dt
from importlib import import_module
from io import BytesIO
from typing import Dict

import backoff
import boto3
Expand Down Expand Up @@ -48,6 +50,14 @@ def append_uniq(size):
return str(size) + uniq


def replace_enums(kwargs: Dict) -> Dict:
    """Return a copy of *kwargs* with every ``enum.Enum`` value flattened to its raw ``.value``.

    Non-enum entries are carried over untouched, and the input mapping itself
    is never mutated. Used to make mutation payloads JSON/DynamoDB serialisable.
    """
    return {
        key: (value.value if isinstance(value, enum.Enum) else value)
        for key, value in kwargs.items()
    }


class BaseData:
"""
BaseData is the base class for data handlers
Expand Down Expand Up @@ -317,7 +327,7 @@ def transact_update(self, object_id, object_type, body):
model = self._model.get(object_id)
assert model.object_type == body.get('clazz_name')
with TransactWrite(connection=self._connection) as transaction:
transaction.update(model, actions=[self._model.object_content.set(body)])
transaction.update(model, actions=[self._model.object_content.set(replace_enums(body))])

es_key = f"{self._prefix}_{object_id}"
self._db_manager.search_manager.index_document(es_key, body)
Expand All @@ -344,9 +354,10 @@ def create(self, clazz_name, **kwargs):
next_id = self.get_next_id()

# TODO: this whole approach sucks !@#%$#
# consider the ENUM problem, and datatime serialisatin
# consider the ENUM problem, and datatime serialisation
# mayby graphene o
# cant we just use the graphene classes json serialisation ??

def new_body(next_id, kwargs):
new = clazz(next_id, **kwargs)
body = new.__dict__.copy()
Expand All @@ -365,7 +376,7 @@ def new_body(next_id, kwargs):
# # print( graphql.utilities.value_from_ast_untyped(object_instance.created) )

try:
self._write_object(next_id, self._prefix, new_body(next_id, kwargs))
self._write_object(next_id, self._prefix, new_body(next_id, replace_enums(kwargs)))
except Exception as err:
logger.error(F"faild to write {clazz_name} {kwargs} {err}")
raise
Expand Down
9 changes: 1 addition & 8 deletions graphql_api/schema/custom/aggregate_inversion_solution.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
This module contains the schema definition for a AggregateInversionSolution.
"""
import copy
import logging
from datetime import datetime as dt

Expand Down Expand Up @@ -92,12 +91,6 @@ class Input:
def mutate_and_get_payload(cls, root, info, **kwargs):
t0 = dt.utcnow()
log.info(f"CreateAggregateInversionSolution mutate_and_get_payload {kwargs}")

json_ready_input = copy.copy(kwargs)

for fld in ['aggregation_fn']:
json_ready_input[fld] = json_ready_input[fld].value

solution = get_data_manager().file.create('AggregateInversionSolution', **json_ready_input)
solution = get_data_manager().file.create('AggregateInversionSolution', **kwargs)
db_metrics.put_duration(__name__, 'CreateAggregateInversionSolution.mutate_and_get_payload', dt.utcnow() - t0)
return CreateAggregateInversionSolution(solution=solution, ok=True)
10 changes: 1 addition & 9 deletions graphql_api/schema/custom/automation_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
"""

import copy
import logging
from datetime import datetime as dt

Expand Down Expand Up @@ -114,15 +113,8 @@ class Arguments:
@classmethod
def mutate(cls, root, info, input):
t0 = dt.utcnow()
json_ready_input = copy.copy(input)

for fld in ['result', 'state', 'task_type']:
json_ready_input[fld] = json_ready_input[fld].value

log.info(f"payload: {json_ready_input}")
task_result = get_data_manager().thing.create('AutomationTask', **json_ready_input)
task_result = get_data_manager().thing.create('AutomationTask', **input)
log.info(f"task_result: {task_result}")

db_metrics.put_duration(__name__, 'CreateAutomationTask.mutate', dt.utcnow() - t0)
return CreateAutomationTask(task_result=task_result)

Expand Down
1 change: 0 additions & 1 deletion graphql_api/schema/custom/inversion_solution.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,6 @@ def mutate_and_get_payload(cls, root, info, **kwargs):
log.info(f"CreateInversionSolution mutate_and_get_payload {kwargs}")
inversion_solution = get_data_manager().file.create('InversionSolution', **kwargs)
db_metrics.put_duration(__name__, 'CreateInversionSolution.mutate_and_get_payload', dt.utcnow() - t0)

solution = CreateInversionSolution(inversion_solution=inversion_solution, ok=True)
log.info(f"solution: {solution}")
return solution
Expand Down
8 changes: 1 addition & 7 deletions graphql_api/schema/custom/openquake_hazard_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
"""

import copy
import logging
from datetime import datetime as dt

Expand Down Expand Up @@ -100,16 +99,11 @@ def mutate(cls, root, info, input):
input_type, nid = from_global_id(input.config)
assert input_type == "OpenquakeHazardConfig"

json_ready_input = copy.copy(input)

for fld in ['task_type', 'model_type', 'state', 'result']:
json_ready_input[fld] = json_ready_input[fld].value

ref = get_data_manager().thing.get_one(nid)
log.debug(f"Got a ref to a real thing: {ref} with thing id: {nid}")
if not ref:
raise Exception("Broken input")
openquake_hazard_task = get_data_manager().thing.create('OpenquakeHazardTask', **json_ready_input)
openquake_hazard_task = get_data_manager().thing.create('OpenquakeHazardTask', **input)
db_metrics.put_duration(__name__, 'CreateOpenquakeHazardTask.mutate', dt.utcnow() - t0)
return CreateOpenquakeHazardTask(openquake_hazard_task=openquake_hazard_task)

Expand Down
17 changes: 8 additions & 9 deletions graphql_api/schema/custom/rupture_generation_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
"""

import copy
import logging
from datetime import datetime as dt

Expand Down Expand Up @@ -58,12 +57,12 @@ def resolve_total_count(root, info, *args, **kwargs):
return len(root.edges)


def json_ready(input):
json_ready_input = copy.copy(input)
for fld in ['result', 'state']:
if json_ready_input.get(fld):
json_ready_input[fld] = json_ready_input[fld].value
return json_ready_input
# def json_ready(input):
# json_ready_input = copy.copy(input)
# for fld in ['result', 'state']:
# if json_ready_input.get(fld):
# json_ready_input[fld] = json_ready_input[fld].value
# return json_ready_input


class CreateRuptureGenerationTask(graphene.Mutation):
Expand All @@ -76,7 +75,7 @@ class Arguments:
def mutate(cls, root, info, input):
t0 = dt.utcnow()
log.info(f"CreateRuptureGenerationTaskmnutate {input}")
task_result = get_data_manager().thing.create('RuptureGenerationTask', **json_ready(input))
task_result = get_data_manager().thing.create('RuptureGenerationTask', **input)
db_metrics.put_duration(__name__, 'CreateRuptureGenerationTask.mutate_and_get_payload', dt.utcnow() - t0)
return CreateRuptureGenerationTask(task_result=task_result)

Expand All @@ -94,6 +93,6 @@ def mutate(cls, root, info, input):
log.info(f"UpdateRuptureGenerationTask {input}")
thing_id = input.pop('task_id')
log.info(f"UpdateRuptureGenerationTask thing_id {thing_id}")
task_result = get_data_manager().thing.update('RuptureGenerationTask', thing_id, **json_ready(input))
task_result = get_data_manager().thing.update('RuptureGenerationTask', thing_id, **input)
db_metrics.put_duration(__name__, 'UpdateRuptureGenerationTask.mutate_and_get_payload', dt.utcnow() - t0)
return UpdateRuptureGenerationTask(task_result=task_result)
8 changes: 1 addition & 7 deletions graphql_api/schema/custom/strong_motion_station.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
"""

import copy
import logging

import graphene
Expand Down Expand Up @@ -109,10 +108,5 @@ class Input:
@classmethod
def mutate_and_get_payload(cls, root, info, **kwargs):
print("mutate_and_get_payload: ", kwargs)

json_ready_input = copy.copy(kwargs)
for fld in ['site_class', 'site_class_basis']:
json_ready_input[fld] = json_ready_input[fld].value

strong_motion_station = get_data_manager().thing.create('StrongMotionStation', **json_ready_input)
strong_motion_station = get_data_manager().thing.create('StrongMotionStation', **kwargs)
return CreateStrongMotionStation(strong_motion_station=strong_motion_station)
7 changes: 1 addition & 6 deletions graphql_api/schema/custom/strong_motion_station_file.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
#! strong_motion_station_file.py

import copy

import graphene
from graphene import Enum, relay
Expand Down Expand Up @@ -38,9 +37,5 @@ class Arguments:
file_result = graphene.Field(SmsFile)

def mutate(self, info, **kwargs):
# print("CreateFile.mutate: ", file_in, kwargs)
json_ready_input = copy.copy(kwargs)
for fld in ['file_type']:
json_ready_input[fld] = json_ready_input[fld].value
file_result = get_data_manager().file.create('SmsFile', **json_ready_input)
file_result = get_data_manager().file.create('SmsFile', **kwargs)
return CreateSmsFile(ok=True, file_result=file_result)
85 changes: 85 additions & 0 deletions graphql_api/tests/test_general_task_bugfix_217.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
"""
Test API function for GeneralTask
using moto mocking
"""

import datetime as dt
import json
import unittest
from io import BytesIO

import boto3
import pytest
from dateutil.tz import tzutc
from graphene.test import Client
from graphql_relay import from_global_id, to_global_id
from moto import mock_dynamodb, mock_s3
from pynamodb.connection.base import Connection # for mocking

from graphql_api.config import REGION, S3_BUCKET_NAME
from graphql_api.data import data_manager
from graphql_api.dynamodb.models import ToshiFileObject, ToshiIdentity, ToshiThingObject
from graphql_api.schema import root_schema
from graphql_api.schema.search_manager import SearchManager

# GraphQL mutation used by the test class below: creates a GeneralTask whose
# input carries enum-valued fields (subtask_type, model_type) plus a DateTime,
# exercising the enum-serialisation path fixed for issue #217.
CREATE_GT = '''
mutation new_gt ($created: DateTime!) {
create_general_task(input:{
created: $created
title: "TEST Build opensha rupture set Coulomb #1"
description:"Using "
agent_name:"chrisbc"
subtask_type: OPENQUAKE_HAZARD,
model_type: COMPOSITE
argument_lists: [{k: "some_metric", v: ["20", "25"]}]
})
{
general_task{
id
subtask_type
model_type
}
}
}
'''


@mock_s3
@mock_dynamodb
class TestGeneralTaskBug217(unittest.TestCase):
    """Regression test for issue #217: creating a GeneralTask with enum-valued
    input fields must round-trip through the data layer without failing on
    enum serialisation. Uses moto to mock S3 and DynamoDB."""

    def setUp(self):
        # GraphQL test client bound to the application schema.
        self.client = Client(root_schema)
        # migrate()

        # Mocked S3 bucket backing the file/object store.
        self._s3_conn = boto3.resource('s3', region_name=REGION)
        self._s3_conn.create_bucket(Bucket=S3_BUCKET_NAME)
        self._bucket = self._s3_conn.Bucket(S3_BUCKET_NAME)
        self._connection = Connection(region=REGION)

        # Mocked DynamoDB tables used by the pynamodb models.
        ToshiThingObject.create_table()
        ToshiFileObject.create_table()
        ToshiIdentity.create_table()

        # NOTE(review): SearchManager is given fake credentials — presumably the
        # search indexing call is tolerated/mocked under test; confirm.
        self._data_manager = data_manager.DataManager(search_manager=SearchManager('test', 'test', {'fake': 'auth'}))

    def test_create_two_gts_and_link_them(self):
        # the first GT: execute the CREATE_GT mutation with a tz-aware timestamp
        # and check the enum fields come back as their GraphQL enum names.
        gt1_result = self.client.execute(CREATE_GT, variable_values=dict(created=dt.datetime.now(tzutc())))
        print(gt1_result)
        # 'R2VuZXJhbFRhc2s6MTAwMDAw' is base64 for "GeneralTask:100000".
        assert gt1_result['data']['create_general_task']['general_task']['id'] == 'R2VuZXJhbFRhc2s6MTAwMDAw'
        assert gt1_result['data']['create_general_task']['general_task']['subtask_type'] == 'OPENQUAKE_HAZARD'
        assert gt1_result['data']['create_general_task']['general_task']['model_type'] == 'COMPOSITE'

        # # the second
        # gt2_result = self.client.execute(CREATE_GT, variable_values=dict(created=dt.datetime.now(tzutc())))
        # print(gt2_result)
        # assert gt2_result['data']['create_general_task']['general_task']['id'] == 'R2VuZXJhbFRhc2s6MQ=='

        # # finally the relation
        # gt_link_result = self.client.execute(
        #     CREATE_GT_RELATION, variable_values=dict(parent_id='R2VuZXJhbFRhc2s6MA==', child_id='R2VuZXJhbFRhc2s6MQ==')
        # )

        # print('GTLINK ', gt_link_result)
        # assert gt_link_result['data']['create_task_relation']['ok'] == True
Loading

0 comments on commit d3d4157

Please sign in to comment.