From 26be1782402d82d1eb61ebe8f1eda20c1a2f594a Mon Sep 17 00:00:00 2001 From: Johan Euphrosine Date: Wed, 16 Aug 2017 19:18:46 +0900 Subject: [PATCH 01/35] google-assistant-grpc: add device action support - add device action proto (generated from cr/165410721) in google-assistant-grpc/proto - add device id to pushtotalk sample - modify nox to point to the proto directory if present - regenerate python bindings using grpc-tools - add support for async custom blink command execution - add test and fix lint Bug: 64613463 Change-Id: I8609c501bbd1098eda9abd01e7e1d010d344624b --- .../v1alpha1/embedded_assistant_pb2.py | 161 +++++++-- .../v1alpha1/embedded_assistant_pb2_grpc.py | 5 +- google-assistant-grpc/nox.py | 23 +- .../v1alpha1/embedded_assistant.proto | 318 ++++++++++++++++++ google-assistant-sdk/README.rst | 56 ++- google-assistant-sdk/blink.wav | Bin 0 -> 55726 bytes .../googlesamples/assistant/grpc/README.rst | 12 + .../assistant/grpc/actions/blink.json | 31 ++ .../assistant/grpc/audio_helpers.py | 1 - .../assistant/grpc/device_helpers.py | 99 ++++++ .../assistant/grpc/pushtotalk.py | 58 +++- .../assistant/grpc/requirements.txt | 1 + .../tests/test_device_helpers.py | 115 +++++++ 13 files changed, 828 insertions(+), 52 deletions(-) create mode 100755 google-assistant-grpc/proto/google/assistant/embedded/v1alpha1/embedded_assistant.proto create mode 100644 google-assistant-sdk/blink.wav create mode 100644 google-assistant-sdk/googlesamples/assistant/grpc/actions/blink.json create mode 100644 google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py create mode 100644 google-assistant-sdk/tests/test_device_helpers.py diff --git a/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2.py b/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2.py index d5bf2e2..544e712 100644 --- a/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2.py +++ b/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2.py @@ -13,6 +13,7 @@ _sym_db = _symbol_database.Default() +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 @@ -20,10 +21,9 @@ name='google/assistant/embedded/v1alpha1/embedded_assistant.proto', package='google.assistant.embedded.v1alpha1', syntax='proto3', - serialized_pb=_b('\n;google/assistant/embedded/v1alpha1/embedded_assistant.proto\x12\"google.assistant.embedded.v1alpha1\x1a\x17google/rpc/status.proto\"\xf5\x01\n\x0e\x43onverseConfig\x12J\n\x0f\x61udio_in_config\x18\x01 \x01(\x0b\x32\x31.google.assistant.embedded.v1alpha1.AudioInConfig\x12L\n\x10\x61udio_out_config\x18\x02 \x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.AudioOutConfig\x12I\n\x0e\x63onverse_state\x18\x03 \x01(\x0b\x32\x31.google.assistant.embedded.v1alpha1.ConverseState\"\xb6\x01\n\rAudioInConfig\x12L\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32:.google.assistant.embedded.v1alpha1.AudioInConfig.Encoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\"<\n\x08\x45ncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\"\xe3\x01\n\x0e\x41udioOutConfig\x12M\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32;.google.assistant.embedded.v1alpha1.AudioOutConfig.Encoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x19\n\x11volume_percentage\x18\x03 
\x01(\x05\"L\n\x08\x45ncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x07\n\x03MP3\x10\x02\x12\x0f\n\x0bOPUS_IN_OGG\x10\x03\"+\n\rConverseState\x12\x1a\n\x12\x63onversation_state\x18\x01 \x01(\x0c\"\x1e\n\x08\x41udioOut\x12\x12\n\naudio_data\x18\x01 \x01(\x0c\"\xbd\x02\n\x0e\x43onverseResult\x12\x1b\n\x13spoken_request_text\x18\x01 \x01(\t\x12\x1c\n\x14spoken_response_text\x18\x02 \x01(\t\x12\x1a\n\x12\x63onversation_state\x18\x03 \x01(\x0c\x12Z\n\x0fmicrophone_mode\x18\x04 \x01(\x0e\x32\x41.google.assistant.embedded.v1alpha1.ConverseResult.MicrophoneMode\x12\x19\n\x11volume_percentage\x18\x05 \x01(\x05\"]\n\x0eMicrophoneMode\x12\x1f\n\x1bMICROPHONE_MODE_UNSPECIFIED\x10\x00\x12\x14\n\x10\x43LOSE_MICROPHONE\x10\x01\x12\x14\n\x10\x44IALOG_FOLLOW_ON\x10\x02\"\x7f\n\x0f\x43onverseRequest\x12\x44\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.ConverseConfigH\x00\x12\x12\n\x08\x61udio_in\x18\x02 \x01(\x0cH\x00\x42\x12\n\x10\x63onverse_request\"\xea\x02\n\x10\x43onverseResponse\x12#\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.StatusH\x00\x12T\n\nevent_type\x18\x02 \x01(\x0e\x32>.google.assistant.embedded.v1alpha1.ConverseResponse.EventTypeH\x00\x12\x41\n\taudio_out\x18\x03 \x01(\x0b\x32,.google.assistant.embedded.v1alpha1.AudioOutH\x00\x12\x44\n\x06result\x18\x05 \x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.ConverseResultH\x00\"=\n\tEventType\x12\x1a\n\x16\x45VENT_TYPE_UNSPECIFIED\x10\x00\x12\x14\n\x10\x45ND_OF_UTTERANCE\x10\x01\x42\x13\n\x11\x63onverse_response2\x8e\x01\n\x11\x45mbeddedAssistant\x12y\n\x08\x43onverse\x12\x33.google.assistant.embedded.v1alpha1.ConverseRequest\x1a\x34.google.assistant.embedded.v1alpha1.ConverseResponse(\x01\x30\x01\x42\x86\x01\n&com.google.assistant.embedded.v1alpha1B\x0e\x41ssistantProtoP\x01ZJgoogle.golang.org/genproto/googleapis/assistant/embedded/v1alpha1;embeddedb\x06proto3') + serialized_pb=_b('\n;google/assistant/embedded/v1alpha1/embedded_assistant.proto\x12\"google.assistant.embedded.v1alpha1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/rpc/status.proto\"\xbe\x02\n\x0e\x43onverseConfig\x12J\n\x0f\x61udio_in_config\x18\x01 \x01(\x0b\x32\x31.google.assistant.embedded.v1alpha1.AudioInConfig\x12L\n\x10\x61udio_out_config\x18\x02 \x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.AudioOutConfig\x12I\n\x0e\x63onverse_state\x18\x03 \x01(\x0b\x32\x31.google.assistant.embedded.v1alpha1.ConverseState\x12G\n\rdevice_config\x18\x04 \x01(\x0b\x32\x30.google.assistant.embedded.v1alpha1.DeviceConfig\"\xb6\x01\n\rAudioInConfig\x12L\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32:.google.assistant.embedded.v1alpha1.AudioInConfig.Encoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\"<\n\x08\x45ncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\"\xe3\x01\n\x0e\x41udioOutConfig\x12M\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32;.google.assistant.embedded.v1alpha1.AudioOutConfig.Encoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x19\n\x11volume_percentage\x18\x03 \x01(\x05\"L\n\x08\x45ncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x07\n\x03MP3\x10\x02\x12\x0f\n\x0bOPUS_IN_OGG\x10\x03\"+\n\rConverseState\x12\x1a\n\x12\x63onversation_state\x18\x01 \x01(\x0c\"\x1e\n\x08\x41udioOut\x12\x12\n\naudio_data\x18\x01 \x01(\x0c\"\xbd\x02\n\x0e\x43onverseResult\x12\x1b\n\x13spoken_request_text\x18\x01 \x01(\t\x12\x1c\n\x14spoken_response_text\x18\x02 \x01(\t\x12\x1a\n\x12\x63onversation_state\x18\x03 
\x01(\x0c\x12Z\n\x0fmicrophone_mode\x18\x04 \x01(\x0e\x32\x41.google.assistant.embedded.v1alpha1.ConverseResult.MicrophoneMode\x12\x19\n\x11volume_percentage\x18\x05 \x01(\x05\"]\n\x0eMicrophoneMode\x12\x1f\n\x1bMICROPHONE_MODE_UNSPECIFIED\x10\x00\x12\x14\n\x10\x43LOSE_MICROPHONE\x10\x01\x12\x14\n\x10\x44IALOG_FOLLOW_ON\x10\x02\"\x7f\n\x0f\x43onverseRequest\x12\x44\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.ConverseConfigH\x00\x12\x12\n\x08\x61udio_in\x18\x02 \x01(\x0cH\x00\x42\x12\n\x10\x63onverse_request\"\xb5\x03\n\x10\x43onverseResponse\x12#\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.StatusH\x00\x12T\n\nevent_type\x18\x02 \x01(\x0e\x32>.google.assistant.embedded.v1alpha1.ConverseResponse.EventTypeH\x00\x12\x41\n\taudio_out\x18\x03 \x01(\x0b\x32,.google.assistant.embedded.v1alpha1.AudioOutH\x00\x12I\n\rdevice_action\x18\t \x01(\x0b\x32\x30.google.assistant.embedded.v1alpha1.DeviceActionH\x00\x12\x44\n\x06result\x18\x05 \x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.ConverseResultH\x00\"=\n\tEventType\x12\x1a\n\x16\x45VENT_TYPE_UNSPECIFIED\x10\x00\x12\x14\n\x10\x45ND_OF_UTTERANCE\x10\x01\x42\x13\n\x11\x63onverse_response\"!\n\x0c\x44\x65viceConfig\x12\x11\n\tdevice_id\x18\x01 \x01(\t\"+\n\x0c\x44\x65viceAction\x12\x1b\n\x13\x64\x65vice_request_json\x18\x02 \x01(\t2\x8e\x01\n\x11\x45mbeddedAssistant\x12y\n\x08\x43onverse\x12\x33.google.assistant.embedded.v1alpha1.ConverseRequest\x1a\x34.google.assistant.embedded.v1alpha1.ConverseResponse(\x01\x30\x01\x42\x86\x01\n&com.google.assistant.embedded.v1alpha1B\x0e\x41ssistantProtoP\x01ZJgoogle.golang.org/genproto/googleapis/assistant/embedded/v1alpha1;embeddedb\x06proto3') , - dependencies=[google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) @@ -48,8 +48,8 @@ ], containing_type=None, options=None, - serialized_start=495, - serialized_end=555, + serialized_start=598, + serialized_end=658, ) _sym_db.RegisterEnumDescriptor(_AUDIOINCONFIG_ENCODING) @@ -78,8 +78,8 @@ ], containing_type=None, options=None, - serialized_start=709, - serialized_end=785, + serialized_start=812, + serialized_end=888, ) _sym_db.RegisterEnumDescriptor(_AUDIOOUTCONFIG_ENCODING) @@ -104,8 +104,8 @@ ], containing_type=None, options=None, - serialized_start=1089, - serialized_end=1182, + serialized_start=1192, + serialized_end=1285, ) _sym_db.RegisterEnumDescriptor(_CONVERSERESULT_MICROPHONEMODE) @@ -126,8 +126,8 @@ ], containing_type=None, options=None, - serialized_start=1594, - serialized_end=1655, + serialized_start=1772, + serialized_end=1833, ) _sym_db.RegisterEnumDescriptor(_CONVERSERESPONSE_EVENTTYPE) @@ -160,6 +160,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='device_config', full_name='google.assistant.embedded.v1alpha1.ConverseConfig.device_config', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -172,8 +179,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=125, - serialized_end=370, + serialized_start=155, + serialized_end=473, ) @@ -211,8 +218,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=373, - serialized_end=555, + serialized_start=476, + 
serialized_end=658, ) @@ -257,8 +264,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=558, - serialized_end=785, + serialized_start=661, + serialized_end=888, ) @@ -288,8 +295,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=787, - serialized_end=830, + serialized_start=890, + serialized_end=933, ) @@ -319,8 +326,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=832, - serialized_end=862, + serialized_start=935, + serialized_end=965, ) @@ -379,8 +386,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=865, - serialized_end=1182, + serialized_start=968, + serialized_end=1285, ) @@ -420,8 +427,8 @@ name='converse_request', full_name='google.assistant.embedded.v1alpha1.ConverseRequest.converse_request', index=0, containing_type=None, fields=[]), ], - serialized_start=1184, - serialized_end=1311, + serialized_start=1287, + serialized_end=1414, ) @@ -454,7 +461,14 @@ is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( - name='result', full_name='google.assistant.embedded.v1alpha1.ConverseResponse.result', index=3, + name='device_action', full_name='google.assistant.embedded.v1alpha1.ConverseResponse.device_action', index=3, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='result', full_name='google.assistant.embedded.v1alpha1.ConverseResponse.result', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, @@ -476,13 +490,76 @@ name='converse_response', full_name='google.assistant.embedded.v1alpha1.ConverseResponse.converse_response', index=0, containing_type=None, fields=[]), ], - serialized_start=1314, - serialized_end=1676, + serialized_start=1417, + serialized_end=1854, +) + + +_DEVICECONFIG = _descriptor.Descriptor( + name='DeviceConfig', + full_name='google.assistant.embedded.v1alpha1.DeviceConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='device_id', full_name='google.assistant.embedded.v1alpha1.DeviceConfig.device_id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1856, + serialized_end=1889, +) + + +_DEVICEACTION = _descriptor.Descriptor( + name='DeviceAction', + full_name='google.assistant.embedded.v1alpha1.DeviceAction', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='device_request_json', full_name='google.assistant.embedded.v1alpha1.DeviceAction.device_request_json', index=0, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1891, + serialized_end=1934, ) 
_CONVERSECONFIG.fields_by_name['audio_in_config'].message_type = _AUDIOINCONFIG _CONVERSECONFIG.fields_by_name['audio_out_config'].message_type = _AUDIOOUTCONFIG _CONVERSECONFIG.fields_by_name['converse_state'].message_type = _CONVERSESTATE +_CONVERSECONFIG.fields_by_name['device_config'].message_type = _DEVICECONFIG _AUDIOINCONFIG.fields_by_name['encoding'].enum_type = _AUDIOINCONFIG_ENCODING _AUDIOINCONFIG_ENCODING.containing_type = _AUDIOINCONFIG _AUDIOOUTCONFIG.fields_by_name['encoding'].enum_type = _AUDIOOUTCONFIG_ENCODING @@ -499,6 +576,7 @@ _CONVERSERESPONSE.fields_by_name['error'].message_type = google_dot_rpc_dot_status__pb2._STATUS _CONVERSERESPONSE.fields_by_name['event_type'].enum_type = _CONVERSERESPONSE_EVENTTYPE _CONVERSERESPONSE.fields_by_name['audio_out'].message_type = _AUDIOOUT +_CONVERSERESPONSE.fields_by_name['device_action'].message_type = _DEVICEACTION _CONVERSERESPONSE.fields_by_name['result'].message_type = _CONVERSERESULT _CONVERSERESPONSE_EVENTTYPE.containing_type = _CONVERSERESPONSE _CONVERSERESPONSE.oneofs_by_name['converse_response'].fields.append( @@ -510,6 +588,9 @@ _CONVERSERESPONSE.oneofs_by_name['converse_response'].fields.append( _CONVERSERESPONSE.fields_by_name['audio_out']) _CONVERSERESPONSE.fields_by_name['audio_out'].containing_oneof = _CONVERSERESPONSE.oneofs_by_name['converse_response'] +_CONVERSERESPONSE.oneofs_by_name['converse_response'].fields.append( + _CONVERSERESPONSE.fields_by_name['device_action']) +_CONVERSERESPONSE.fields_by_name['device_action'].containing_oneof = _CONVERSERESPONSE.oneofs_by_name['converse_response'] _CONVERSERESPONSE.oneofs_by_name['converse_response'].fields.append( _CONVERSERESPONSE.fields_by_name['result']) _CONVERSERESPONSE.fields_by_name['result'].containing_oneof = _CONVERSERESPONSE.oneofs_by_name['converse_response'] @@ -521,6 +602,9 @@ DESCRIPTOR.message_types_by_name['ConverseResult'] = _CONVERSERESULT DESCRIPTOR.message_types_by_name['ConverseRequest'] = _CONVERSEREQUEST DESCRIPTOR.message_types_by_name['ConverseResponse'] = _CONVERSERESPONSE +DESCRIPTOR.message_types_by_name['DeviceConfig'] = _DEVICECONFIG +DESCRIPTOR.message_types_by_name['DeviceAction'] = _DEVICEACTION +_sym_db.RegisterFileDescriptor(DESCRIPTOR) ConverseConfig = _reflection.GeneratedProtocolMessageType('ConverseConfig', (_message.Message,), dict( DESCRIPTOR = _CONVERSECONFIG, @@ -578,6 +662,20 @@ )) _sym_db.RegisterMessage(ConverseResponse) +DeviceConfig = _reflection.GeneratedProtocolMessageType('DeviceConfig', (_message.Message,), dict( + DESCRIPTOR = _DEVICECONFIG, + __module__ = 'google.assistant.embedded.v1alpha1.embedded_assistant_pb2' + # @@protoc_insertion_point(class_scope:google.assistant.embedded.v1alpha1.DeviceConfig) + )) +_sym_db.RegisterMessage(DeviceConfig) + +DeviceAction = _reflection.GeneratedProtocolMessageType('DeviceAction', (_message.Message,), dict( + DESCRIPTOR = _DEVICEACTION, + __module__ = 'google.assistant.embedded.v1alpha1.embedded_assistant_pb2' + # @@protoc_insertion_point(class_scope:google.assistant.embedded.v1alpha1.DeviceAction) + )) +_sym_db.RegisterMessage(DeviceAction) + DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n&com.google.assistant.embedded.v1alpha1B\016AssistantProtoP\001ZJgoogle.golang.org/genproto/googleapis/assistant/embedded/v1alpha1;embedded')) @@ -585,10 +683,10 @@ # THESE ELEMENTS WILL BE DEPRECATED. # Please use the generated *_pb2_grpc.py files instead. 
import grpc - from grpc.framework.common import cardinality - from grpc.framework.interfaces.face import utilities as face_utilities from grpc.beta import implementations as beta_implementations from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities class EmbeddedAssistantStub(object): @@ -635,6 +733,7 @@ def Converse(self, request_iterator, context): * ConverseResponse.audio_out * ConverseResponse.audio_out + The user then says *bagels* and the assistant responds *OK, I've added bagels to your shopping list*. This is sent as another gRPC connection call to the `Converse` method, again with streamed requests and @@ -704,6 +803,7 @@ def Converse(self, request_iterator, context): * ConverseResponse.audio_out * ConverseResponse.audio_out + The user then says *bagels* and the assistant responds *OK, I've added bagels to your shopping list*. This is sent as another gRPC connection call to the `Converse` method, again with streamed requests and @@ -758,6 +858,7 @@ def Converse(self, request_iterator, timeout, metadata=None, with_call=False, pr * ConverseResponse.audio_out * ConverseResponse.audio_out + The user then says *bagels* and the assistant responds *OK, I've added bagels to your shopping list*. This is sent as another gRPC connection call to the `Converse` method, again with streamed requests and diff --git a/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2_grpc.py b/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2_grpc.py index 9cf133c..7e424e8 100644 --- a/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2_grpc.py +++ b/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2_grpc.py @@ -1,9 +1,7 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -from grpc.framework.common import cardinality -from grpc.framework.interfaces.face import utilities as face_utilities -import google.assistant.embedded.v1alpha1.embedded_assistant_pb2 as google_dot_assistant_dot_embedded_dot_v1alpha1_dot_embedded__assistant__pb2 +from google.assistant.embedded.v1alpha1 import embedded_assistant_pb2 as google_dot_assistant_dot_embedded_dot_v1alpha1_dot_embedded__assistant__pb2 class EmbeddedAssistantStub(object): @@ -50,6 +48,7 @@ def Converse(self, request_iterator, context): * ConverseResponse.audio_out * ConverseResponse.audio_out + The user then says *bagels* and the assistant responds *OK, I've added bagels to your shopping list*. This is sent as another gRPC connection call to the `Converse` method, again with streamed requests and diff --git a/google-assistant-grpc/nox.py b/google-assistant-grpc/nox.py index 9ae41ec..dc82b57 100644 --- a/google-assistant-grpc/nox.py +++ b/google-assistant-grpc/nox.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os + import nox @@ -29,12 +31,21 @@ def lint(session): def protoc(session): session.install('pip', 'setuptools') session.install('grpcio-tools') - session.run('python', '-m', 'grpc_tools.protoc', - '--proto_path=googleapis', - '--python_out=.', - '--grpc_python_out=sdk/grpc/', - 'googleapis/google/assistant/embedded/v1alpha1/' - 'embedded_assistant.proto') + if os.path.exists('proto'): + session.run('python', '-m', 'grpc_tools.protoc', + '--proto_path=proto', + '--proto_path=googleapis', + '--python_out=.', + '--grpc_python_out=.', + 'proto/google/assistant/embedded/v1alpha1/' + 'embedded_assistant.proto') + else: + session.run('python', '-m', 'grpc_tools.protoc', + '--proto_path=googleapis', + '--python_out=.', + '--grpc_python_out=.', + 'googleapis/google/assistant/embedded/v1alpha1/' + 'embedded_assistant.proto') @nox.session diff --git a/google-assistant-grpc/proto/google/assistant/embedded/v1alpha1/embedded_assistant.proto b/google-assistant-grpc/proto/google/assistant/embedded/v1alpha1/embedded_assistant.proto new file mode 100755 index 0000000..08b487a --- /dev/null +++ b/google-assistant-grpc/proto/google/assistant/embedded/v1alpha1/embedded_assistant.proto @@ -0,0 +1,318 @@ +// Copyright 2017 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.assistant.embedded.v1alpha1; + +import "google/api/annotations.proto"; +import "google/rpc/status.proto"; + +option go_package = "google.golang.org/genproto/googleapis/assistant/embedded/v1alpha1;embedded"; +option java_multiple_files = true; +option java_outer_classname = "AssistantProto"; +option java_package = "com.google.assistant.embedded.v1alpha1"; + + +// Service that implements Google Assistant API. +service EmbeddedAssistant { + // Initiates or continues a conversation with the embedded assistant service. + // Each call performs one round-trip, sending an audio request to the service + // and receiving the audio response. Uses bidirectional streaming to receive + // results, such as the `END_OF_UTTERANCE` event, while sending audio. + // + // A conversation is one or more gRPC connections, each consisting of several + // streamed requests and responses. + // For example, the user says *Add to my shopping list* and the assistant + // responds *What do you want to add?*. The sequence of streamed requests and + // responses in the first gRPC message could be: + // + // * ConverseRequest.config + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseResponse.event_type.END_OF_UTTERANCE + // * ConverseResponse.result.microphone_mode.DIALOG_FOLLOW_ON + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // + // + // The user then says *bagels* and the assistant responds + // *OK, I've added bagels to your shopping list*. 
This is sent as another gRPC + // connection call to the `Converse` method, again with streamed requests and + // responses, such as: + // + // * ConverseRequest.config + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseResponse.event_type.END_OF_UTTERANCE + // * ConverseResponse.result.microphone_mode.CLOSE_MICROPHONE + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // + // Although the precise order of responses is not guaranteed, sequential + // ConverseResponse.audio_out messages will always contain sequential portions + // of audio. + rpc Converse(stream ConverseRequest) returns (stream ConverseResponse); +} + +// Specifies how to process the `ConverseRequest` messages. +message ConverseConfig { + // Specifies how to process the subsequent incoming audio. Required if + // audio_in bytes are to be provided. + AudioInConfig audio_in_config = 1; + + // *Required* Specifies how to format the audio that will be returned. + AudioOutConfig audio_out_config = 2; + + // *Required* Represents the current dialog state. + ConverseState converse_state = 3; + + // Device configuration that uniquely identifies a specific device. + DeviceConfig device_config = 4; +} + +// Specifies how to process the `audio_in` data that will be provided in +// subsequent requests. For recommended settings, see the Google Assistant SDK +// [best practices](https://developers.google.com/assistant/sdk/best-practices/audio). +message AudioInConfig { + // Audio encoding of the data sent in the audio message. + // Audio must be one-channel (mono). The only language supported is "en-US". + enum Encoding { + // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. + ENCODING_UNSPECIFIED = 0; + + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + // This encoding includes no header, only the raw audio bytes. + LINEAR16 = 1; + + // [`FLAC`](https://xiph.org/flac/documentation.html) (Free Lossless Audio + // Codec) is the recommended encoding because it is + // lossless--therefore recognition is not compromised--and + // requires only about half the bandwidth of `LINEAR16`. This encoding + // includes the `FLAC` stream header followed by audio data. It supports + // 16-bit and 24-bit samples, however, not all fields in `STREAMINFO` are + // supported. + FLAC = 2; + } + + // *Required* Encoding of audio data sent in all `audio_in` messages. + Encoding encoding = 1; + + // *Required* Sample rate (in Hertz) of the audio data sent in all `audio_in` + // messages. Valid values are from 16000-24000, but 16000 is optimal. + // For best results, set the sampling rate of the audio source to 16000 Hz. + // If that's not possible, use the native sample rate of the audio source + // (instead of re-sampling). + int32 sample_rate_hertz = 2; +} + +// Specifies the desired format for the server to use when it returns +// `audio_out` messages. +message AudioOutConfig { + // Audio encoding of the data returned in the audio message. All encodings are + // raw audio bytes with no header, except as indicated below. + enum Encoding { + // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. + ENCODING_UNSPECIFIED = 0; + + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + LINEAR16 = 1; + + // MP3 audio encoding. The sample rate is encoded in the payload. + MP3 = 2; + + // Opus-encoded audio wrapped in an ogg container. 
The result will be a + // file which can be played natively on Android and in some browsers (such + // as Chrome). The quality of the encoding is considerably higher than MP3 + // while using the same bitrate. The sample rate is encoded in the payload. + OPUS_IN_OGG = 3; + } + + // *Required* The encoding of audio data to be returned in all `audio_out` + // messages. + Encoding encoding = 1; + + // *Required* The sample rate in Hertz of the audio data returned in + // `audio_out` messages. Valid values are: 16000-24000. + int32 sample_rate_hertz = 2; + + // *Required* Current volume setting of the device's audio output. + // Valid values are 1 to 100 (corresponding to 1% to 100%). + int32 volume_percentage = 3; +} + +// Provides information about the current dialog state. +message ConverseState { + // *Required* The `conversation_state` value returned in the prior + // `ConverseResponse`. Omit (do not set the field) if there was no prior + // `ConverseResponse`. If there was a prior `ConverseResponse`, do not omit + // this field; doing so will end that conversation (and this new request will + // start a new conversation). + bytes conversation_state = 1; +} + +// The audio containing the Assistant's response to the query. Sequential chunks +// of audio data are received in sequential `ConverseResponse` messages. +message AudioOut { + // *Output-only* The audio data containing the assistant's response to the + // query. Sequential chunks of audio data are received in sequential + // `ConverseResponse` messages. + bytes audio_data = 1; +} + +// The semantic result for the user's spoken query. Multiple of these messages +// could be received, for example one containing the recognized transcript in +// spoken_request_text followed by one containing the semantics of the response, +// i.e. containing the relevant data among conversation_state, microphone_mode, +// and volume_percentage. +message ConverseResult { + // Possible states of the microphone after a `Converse` RPC completes. + enum MicrophoneMode { + // No mode specified. + MICROPHONE_MODE_UNSPECIFIED = 0; + + // The service is not expecting a follow-on question from the user. + // The microphone should remain off until the user re-activates it. + CLOSE_MICROPHONE = 1; + + // The service is expecting a follow-on question from the user. The + // microphone should be re-opened when the `AudioOut` playback completes + // (by starting a new `Converse` RPC call to send the new audio). + DIALOG_FOLLOW_ON = 2; + } + + // *Output-only* The recognized transcript of what the user said. + string spoken_request_text = 1; + + // *Output-only* The text of the assistant's spoken response. This is only + // returned for an IFTTT action. + string spoken_response_text = 2; + + // *Output-only* State information for subsequent `ConverseRequest`. This + // value should be saved in the client and returned in the + // `conversation_state` with the next `ConverseRequest`. (The client does not + // need to interpret or otherwise use this value.) There is no need to save + // this information across device restarts. + bytes conversation_state = 3; + + // *Output-only* Specifies the mode of the microphone after this `Converse` + // RPC is processed. + MicrophoneMode microphone_mode = 4; + + // *Output-only* Updated volume level. 
The value will be 0 or omitted + // (indicating no change) unless a voice command such as "Increase the volume" + // or "Set volume level 4" was recognized, in which case the value will be + // between 1 and 100 (corresponding to the new volume level of 1% to 100%). + // Typically, a client should use this volume level when playing the + // `audio_out` data, and retain this value as the current volume level and + // supply it in the `AudioOutConfig` of the next `ConverseRequest`. (Some + // clients may also implement other ways to allow the current volume level to + // be changed, for example, by providing a knob that the user can turn.) + int32 volume_percentage = 5; +} + +// The top-level message sent by the client. Clients must send at least two, and +// typically numerous `ConverseRequest` messages. The first message must +// contain a `config` message and must not contain `audio_in` data. All +// subsequent messages must contain `audio_in` data and must not contain a +// `config` message. +message ConverseRequest { + // Exactly one of these fields must be specified in each `ConverseRequest`. + oneof converse_request { + // The `config` message provides information to the recognizer that + // specifies how to process the request. + // The first `ConverseRequest` message must contain a `config` message. + ConverseConfig config = 1; + + // The audio data to be recognized. Sequential chunks of audio data are sent + // in sequential `ConverseRequest` messages. The first `ConverseRequest` + // message must not contain `audio_in` data and all subsequent + // `ConverseRequest` messages must contain `audio_in` data. The audio bytes + // must be encoded as specified in `AudioInConfig`. + // Audio must be sent at approximately real-time (16000 samples per second). + // An error will be returned if audio is sent significantly faster or + // slower. + bytes audio_in = 2; + } +} + +// The top-level message received by the client. A series of one or more +// `ConverseResponse` messages are streamed back to the client. +message ConverseResponse { + // Indicates the type of event. + enum EventType { + // No event specified. + EVENT_TYPE_UNSPECIFIED = 0; + + // This event indicates that the server has detected the end of the user's + // speech utterance and expects no additional speech. Therefore, the server + // will not process additional audio (although it may subsequently return + // additional results). The client should stop sending additional audio + // data, half-close the gRPC connection, and wait for any additional results + // until the server closes the gRPC connection. + END_OF_UTTERANCE = 1; + } + + // Exactly one of these fields will be populated in each `ConverseResponse`. + oneof converse_response { + // *Output-only* If set, returns a [google.rpc.Status][google.rpc.Status] message that + // specifies the error for the operation. + // If an error occurs during processing, this message will be set and there + // will be no further messages sent. + google.rpc.Status error = 1; + + // *Output-only* Indicates the type of event. + EventType event_type = 2; + + // *Output-only* The audio containing the Assistant's response to the query. + AudioOut audio_out = 3; + + // *Output-only* Contains the action triggered by the query with the + // appropriate payloads and semantic parsing. + DeviceAction device_action = 9; + + // *Output-only* The final semantic result for the user's spoken query. 
+ ConverseResult result = 5; + } +} + +// The identification information for devices integrated with the Assistant. +// These fields should be populated for any queries sent from 3P devices. +message DeviceConfig { + // *Required* Unique identifier for the device. Example: DBCDW098234. This + // MUST match the device_id returned from device registration. This device_id + // is used matched against the user's registered devices to lookup the + // supported traits and capabilities of this device. + string device_id = 1; +} + +// The response returned to the device if any 3P Custom Device Grammar is +// triggered. The 3P Custom Device Grammar is enabled through the specific +// [DeviceConfig](google.assistant.embedded.v1alpha1.DeviceConfig) provided by +// this device, and should be handled appropriately. For example, a 3P device +// which supports the customized query "do a dance" would receive a DeviceAction +// with a JSON payload containing the semantics of the request. +message DeviceAction { + // JSON containing the device control response generated from the triggered 3P + // Custom Device Grammar. The format is given by the [action.devices.EXECUTE]( + // https://developers.google.com/actions/smarthome/create-app#actiondevicesexecute) + // request type. + string device_request_json = 2; +} diff --git a/google-assistant-sdk/README.rst b/google-assistant-sdk/README.rst index d091df7..1d59b80 100644 --- a/google-assistant-sdk/README.rst +++ b/google-assistant-sdk/README.rst @@ -9,7 +9,7 @@ Installing - You can install using `pip`_:: - pip install --upgrade google-assistant-sdk + pip install --upgrade google-assistant-sdk[samples] Usage ----- @@ -58,10 +58,58 @@ This reference sample implements a simple but functional client for the `Google sudo apt-get install portaudio19-dev libffi-dev libssl-dev pip install --upgrade google-assistant-sdk[samples] -- Try the push to talk sample:: +- Run the push to talk sample. The sample records a voice query after a key press and plays back the Google Assistant's answer:: googlesamples-assistant-pushtotalk +- Try some Google Assistant voice query like "What time is it?". + +- Get the `gactions`_ CLI tool. + +- Create an `Action Package`_ describing the Device Actions traits that the device implements:: + + { + "manifest": { + "displayName": "Blinky light", + "invocationName": "Blinky light", + "category": "PRODUCTIVITY" + }, + "actions": [{ + "name": "actions.devices", + "config": { + "@type": "type.googleapis.com/google.actions.v2.devices.DeviceControl", + "commands": [{ + "intent": { + "name": "BLINK", + "parameters": [{ + "name": "number", + "type": "SchemaOrg_Number" + }], + "trigger": { + "queryPatterns": [ + "Blink $SchemaOrg_Number:number times" + ] + } + }, + "directResponseFulfillment": { + "ttsPattern": "Blinking the light $SchemaOrg_Number:number times" + }, + "requiredTraits": ["Blink"] + }] + } + }] + } + +- Register the `Action package`_ using the `gactions`_ CLI tool:: + + gactions test --action_package blink.json --project + +- Try a custom device action query like "Blink 5 times". + +- Run in verbose mode to see the gRPC communication with the Google Assistant API:: + + googlesamples-assistant-pushtotalk -v + googlesamples-assistant-hotword ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -114,5 +162,5 @@ the License. .. _Google Assistant SDK: https://developers.google.com/assistant/sdk .. _Google Assistant gRPC API: https://developers.google.com/assistant/sdk/reference/rpc .. 
_Google Assistant library: https://developers.google.com/assistant/sdk/reference/library/python -.. _GitHub releases page: https://github.com/googlesamples/assistant-sdk-python/releases - +.. _Action Package: https://developers.google.com/actions/reference/rest/Shared.Types/ActionPackage +.. _gactions: https://developers.google.com/actions/tools/gactions-cli diff --git a/google-assistant-sdk/blink.wav b/google-assistant-sdk/blink.wav new file mode 100644 index 0000000000000000000000000000000000000000..16878cd753cceea2c1b085860a4864ecdfb9889c GIT binary patch literal 55726 zcmeFZ1$b0f+b+CyG?Ppw9wGz@?oiy_U5XVcUbLl9ikB8BR}J& z`()gNrc=6za^9RFEK zBSld(wz*BPGmD}a%1C`Y^s}M!agFllk||1%rT^v4fB*m9Z?kNXGiAG+C!4Y5Vaxfm zk$vSTIYX}VZ+6-BkIjQU&p$Rnet+YE1d9QYYy3^Q`rRiK->>Xh@DzK|9sYaXeFwSb z{kJH;{x#zNK2-lR1dB2B<7dI>-W~tDqx+iotg?8ZK$I*|6e|&x}BhpNRMp07H zwFD1^V*exV^xyd|LjMYx|1M07l!4MvrKo5sjEbcq@Eu+B9gA=BE23x%!&$PKuo?fc z#Zx+*HU4?jgeQ?k&g+Vr0SJ)a6dI3X8f-i^4aNSsM}G16McQ9C4eley|NVc;JP}GG zjr{u0snUNn=^Ys_PsIa^uzw(?pr?F>6`k?pSB#+nX#Fb;4R~%bh6cRj?=e*H`)3UQ z_aUgDM(Fjwh9RNC{~HnX(GSnrfpb?wGZ&{fC@r&3Sq*?xAT_{qC z$fb8>Q?XnkdxETkRiV5|Dw4c}&J}8Zm*SN<^h+QAum3JS)Pc)*Jgk^ri)8>gM&w0U z5#<~3O=Jp@Fz+_MoFTG>@DSMur%191_Y=uWMrDB&<-#ZhjGV~dVjV{I{H+)va^1Vq zw|L)=gz{~`Jh?CeMQSMMypK^aAN+S=_SfeS4E@~g=YE2nzx`J+B(ltpAPWTfcMv4# z_;r{cJq24ohQ)}zyZ1hN z|4n+L^rqMnA?Rr^j$%uM*dU56qP$6Yn}XZ>=qWade-s46_Yow$`Hv8c0D{Ip5iG{= zUm-~BB8A7u8;dc+!GHYub?86y&EFlNC?d~^3@3d1kB9tUw?ia#SP>m4b5jbx6*^Qt zmD%>sExD$c+Q}UGXNb_&-`RwJht1y~`#Wd|hQ$*7@2>d2;n;r{hGeXN|M&NZ{qxa( zzWe`Q2>R{9zoW!|#^&F9#8_nU5Lxn9E#^PsKd1aAa>u`sh!L?Z6>BpGQ7vUl`|~>o z=g6kmq5m9}DYiQOu^GQD;K5yr6(;?|t}3=U-=!#GJ1N`07pHfhPI^}S_dbHfC=owZ z`K3tecai%mf_5NAP$Kvd^!x~B7U5TnpdUkjROX)%ERJmbKLizOe4hsYR}jqnFVLW0 z{}$87zh}=3>zYVSVk=AFKoapa6upW_XJXweHmaWjm8glI5_xD-Vo4CqNbE4;KP&c= zUt5x+#TJ?dPbUv2R-ymNiVsGf?|-WD?f<3d35Dtvk4@3Ne%kl{^4olV-E0I$>;B{& z66sMaZ9g{=%TdwiigieANM*drn~I-S%%%Q0dH;wRHV;9NcqT;3kVc-aj7u4(5@}Mb z2S0*-Tf&bZu?YVeK?T?UZwUJF^dqJq^!_hDihlG+zfC~!eVj<232J1X!-{&V_`Jlb zCUiiqCnyR8wO} z1l&vdNjz*GcaV4jc^3}-`RU)2itJIvCM}*da#r~x$NbHI>_7Te{7}*Z9p>8qzT&IN zyWVXI-4fm));#+UyOVe#r1_(i#Gdj;EqUzmMf~LVt;sC;@gROK;n8Bhmq=;lmtW%% z3k@5IFP&?+$gWvu?a$fmU#zdZ~+~q(1{`bE+_wJSNV@RI)?pdX0 z?_#J>BcU-rhQxOveiXql0?i+rA$C1EN}@J|i{6(o1SP_u3T+Vt$&vT5D?au=hoA(C zEk@At-+(Rd#l<=6WNzDw`Q`FGJPejY)CP#{6ZzY)57x3TcR$vMB*L(Y*+@tVmK z$i82$`|T5gfFd!A<;lBye>aAAkCE{ZswO=3?s5OV84)IOo?Jobn*0$tqevps3q>|6 za#HD?Us@^>R{1RMucB$*?c@9V#5XQ}f`7z>z6eJX$>+zwf6lK-ly?w3iuc}s_J8(^ zjP8Htk36sy;iSI%x9sGV_8mRg;X#1~CIg;`Zs(;u2@!Rf%PZZ0Y zSR^FVL97l!_vDvC{qIJN|JnX1#)nX_U-J1!?B{(yW`utT?+~j&v1t^&^RDJ0eC6Mi zeMJ)$_k(Z+;gxvI4~Z`-@ksJX;aYXOSml-*N{f!CwV^6o5U~A!3@5V zZ^&2VOY(ksue?UyAkUZQ$W!Fe@-Vq4S~s~PwwCe-a#gg3av8aXTuyExC*U&y$I8eR zaI`+IX(+dm`{AC^@(6jVJVRb5FUAvh$fxn_8}e=WF=UpjNRE+NBUXcwXD&n4MnCIQ z)u_r;b9^?Z>QYUxHKMvxU8uom!>RGq2x6&d0<(kiHTirIt=6DAQq$4fP1IxR4HZb& zp3Q^e`Z)au{g$@TEK@+oGe#z!DaFJx8iq%+(J$%e^sn?DdO7_a{R#aM-I%UU z2hurIK6Mi)eGd#rQ{#YdWl#q3laeqy$1qPTu#K0;Vsu^P=IC1uS&gxY7*U4wLV6;d zk}gZlP@Cik%x{pKBKDQq zfg`??W=je3Woe5TD0RX3mq=AWLlM+%P>x-$kMAV85=}Ar)D-k`H10hNgiQFh1Fw6a zrlsIQ2k7vOv{)_!q>|)g)N^_jbBWo?sM$xXhWmr<%RXi*unklOZ7p?UHkVfOG3x$o zmPFq9S~g1Ud?meKdms3W!WGYb*E&xx!S3_ADfb9plo%--@%Hkz5Qa(y`HJ|9&{Di4 zAED;Rt)vFhLdJ4$Ez*$8t|#*92%x zXLityK)HLV$@Booo#B|*A)uukP|E^H&gW7ma9u9=Eks%%PL@89ZPGz;hWJ>_6Gw=f zg(^ZFVWYPLFSVb^lM zaD%zuxz&74)rYDos!pl{s>`Zgsx*ENwv~KY)l&5@njmeoc9+(yy{E0Ly`g@ zN>QVt21ebE{3@bb`1P=}p-HAK#&v-|1+3TA)CBRZ>GINR-%5|!Io}>=i?*Jzh?W!9 
z-L{?f`p!n~+FqY;gBULlpl;B!Sr>PkpQT!(&d@y2R@XJwxwMHucBfzZx(z&qc&+xx=%%=^YW0yEJ-_)fSWj1Vm%54q5ozR84e8+cwlTeDI-RAAG*=JTOjn9?w6l%#S4XZR-nrAc%NgZ5=c@1S?OE)p;$7tZ)|=)1(R&@VdeU>p z&AacqrZ|r}PS{%j&5D-(g{91u3L511%?rpIp8FjT*6U1$$nRsW z#CI=syL8twzm}>~a$4+HQRBka23HMyq>EQ?;zl#~sAKYMDMIY#GkRCK?amgCuC`^C zfWpD%uIBsZr-cKo5A3&Gm3)h&=X51LTOFc1ska4mFQHqH)inMG&c`kRbzfsX#a5TG&}quf|)`8|}4tJf7E{v!27AJ)W7K-kwpO)}C4(v%8af zn=8qg?NB)$*~VIjT4D;jnvWGE6#Smwu0U^TZ#TGUVIlRDZ=;V5ULSTlGPuN@n8>&t zv2$Z~MbC)58YY>jU|V2Dz%5;6P*I`E!f)l?KrUXFyut-t36hCw7axxfU~Wmn|-bAnAK~! zW3gEnYmPO*VRU!&t&&hL|GVIh-(Uj;1*9AR+j zck9M$t831v%J5O_bShkW#Mt1&*KTtl3{Ir`$7EA}(CGqxADTw8qv45FP6-5&~WiD%yO zm$ah|9%J2*L7}#=so{M1nJ`c2=8ztyslmOCn}SLOH4dz3Xb>b%(6 zcglUqNjtXLM%u31V(bs>I;Y_5;y&zI=56M)`_2fpFy2K{5VX^Lsz2R{naIxPbo_08 zttv|WK)p~ctA0@pR4wHXa2|FmlS_}H(@AkZESEB{?j-pY<%Bl&v4^-F{9x4^T%FB- zsT!`DplYF-$UlJg`GKp)TIfW2C#=E**o{H*3n`lFB6pI$lhVX=K_e`Ot{a4HIi&1v zp^Vs2j2DLst$^tQ-z)DAkbUF4H$0o%)!Yq1W%V4FZ7J5#*0I*PmW735%#+Nm%y9+I zys+HHIXklpa?a+I%^jRioBP^II-0w;QtJb&#VjjTkl3x_%<5saYghj{adC+O;fGBP z1J|j?(3hpILRHTddoN45{E}IYw6D@avaaSYvMqBb$kR2~gH;htBkG4|hL4Th7O^n2 zZt$d_&4CvK&S>s2@#0L^Axl8s`Aluby7ZA5g<1FVGi?1mk~o_l#@EydhTns#8-EG* zhPcD#hd&Md!xUpI4Ct$w&%6;PyJp)K7p^RfuY()J5@2?;*!%YbUd=U}M38f;;B&mevbrQSy!~%;7)TTV2rYTnw@sk0Xvu z-Mx1`kzR*)D#o$g^EK?3R^F@bZyYbH1BLqx!kxsIlsL5 zC(9DYQ_nVfN#K`pE6PqS-9LV3$vb5ZmAM_iy2SRF9kJV^@0zOUCevTLlL|k`)91Fy zeVOw`jwf$W;Tg+}!YAeqwjA#^_9xwnplTsQ!e2xTj4+19nLZ7AX6R{%H5}7dP^VB{ z?=Gic{n7kw{;K>@g+1*%Tn{{Rd>zFm@_qWgYO0}5NcFHgA!6`F;|}Ae!P`t?@MnQf zwQW@tmqDME??`u~OmTxx?K$chEDV%xLtgEdn=++Y19O5QHn@xH#_v&g)xa8H9?_?m zQ%nw(E!Gqk`$D{3-1*MI&hH#MvgI8$nBS1B5h>K z`n1+r)3bd!FLFQ4y_gr_)avS$m=}99=p6gI>QqQ#e3er9B@0TNH{H=RJF$_?kBpcd{;a|6{?yA%rPVe6zaaw?bI8Bo`%+nY8&>S9E1fnh3^mDeCDn;E^GednrRY%pE z&*$cIA98)T{%knCOzJHzgctCsq?J;{D)0v4sf)CYP2%I#>(u?!@#>yxv$~?@f%=ZB z0^gXONW0)GycS0ab75=NadmX&ItmU1bTkG%2i5@I&s) ztg)Gf%%seatcBSZa)a`==C;gxG3ha1N83Yh>wjXaaK3;K z!&G5!1BL>RLF{jQNlo8?kl=^G34yD1jr4ni9PlhQht&!VHhmU&S@)f$P?MnDsT$3A zrT&=PZk~Ovx31Ifa&EU{fW4&SbJu=PqOVYB3tP4w{F@e11f`{SA@Y=tnA9%$x>O$? zMnhT+tDqHP7ZjBypQbu7M_Cnjk&WgUzJV$fSX{%_3@Ci7id0?XcXNH&=FELM1%B`t zi2)uS-xXh|Z=83c=N>$R?e0%tv8IDFFFR7~k8HGUi}k#w^2^>FQJwU@gEJF%twr8G{6_FCN)JncL` zxGTHwxc7T*d8TQ`;2YRbfquB_M#{?JnhqB z1xS;5K9~2DH^lpq#|315b*+Mq`rL8PzRi|w8C#fC;LYEY7n*k}w|d^-{4E7H&DjN6 z`CIc3SXO%i*jAdlsvh(R`7m2fd(cqN_}Z9c=%snV{jQo2(8D;{_`9K_{WH{t8AOVvBT_eyHZk8bp=@vVHM^6m z$N$08sz$1ts>OJb0YK!`!dYC#v9i%=2%j+rM4RU@ie4&|aI6I%YK?{hMny4*! z-lx)GQ53!uoW4=MXYh0Sde*q#!mga|-0Zk*_t|>b?pfd)>`EA9;Fsf<4bbQ`0;aPh04Qo!&z46kmDadm&foEIQ#K-jlzl z>d=enyYz=l6nm7d&5h>1?NQA!UeBrdwwfz?lcA5kmu9YNrn;>zHQ`b$K18%0N_$v6O_*VNq^KI~D`dSFdLK|^0Jd~zVsr8d5_SB#Xxg%US z{Zc$9{mkytt`B&w8>9M+J;Ft6ml_N~qXM3)%kUqnD(U(dem9KJkI_s~-_eBW!vh}a z1a%F50N0OyrfLT7{R!dP*N7y+qpR(v8|kz0Wu)<8#XrbjYIm=}nStziP-30C78 za4Iembh!dFv5ITTRpE?W7aWUj>nky(N?A{Hp1km;9 zWcqu^*&$3Tsx~6j3!#fz&{q+)UMWWxWk#Z@A&B~Kk(*NkkW1M~T|s%OD4xGkSx(hR*}?^xp*3F#HN1 zb&B>o&2G(1Z7;;h&S|n#yZ8!x4PM8Wa8I~J+ya(o<}XKQkA&wCOg(t!cL}|y1SA|8`OhsrN2PTKbucR2nk8-GHh~966w|If6$DU;~ z*yiA!6YrY^ygKZ?kBgxE)@q+#Cch7bm zbefz!9C41*@CXjs8rb4&H>}HSW$cw4)tukC)_c^xZ-ob9sN9J9hVBpjw~}9_+M_py}?knaX zD1IIN5OMZ<$Y${15v}xC+>eo2pnY-?aa<A#^RRkksOrGzUZ;!74 zntG^ELTCW`S|#K|AJ&xWN(YefDhKO>17-X~k7folE!no*Ew&T1&q(f5?hsdr|AmX? 
zG~5+<+tt}_>;>j1B9j7gd6lW&i14Q&x|Aw>1!|1(edx{eeCA<1R}nqCfhe!+Omy9L z?se{P^>vwWY?P;ucary_udeW`FcI@H0@;t&bWMg~SK^IP{8+w@>Xb@UO;qnt2WrY{ z+G4ATZ&o7}*$TrzF%MN;Rd4y#{3ythy`UG0YmAuTGUjXK5NiRQsmL0UEcrV5mh`z) zR_ZDlrEKwrm>~WRe11SgwyE&ecNThQsqbfS%|>5>ub%HC$b#F*ne-AS0l^Q%?V=s{ z{D3@QJ7iVbgNt54GOeedA}iD#Qg=SnlW7ADM=^Klee}o3O5Q|%wmUL8YUF|Rpv#uv z+~LA+&~29x2QTof^fdO|bPsmNx@)_WT`ygm+{@ezVJWQg&h+i~JrRb1rJpkr;1Q{@tDgd)<&do-)YnxbRb^G{_yqnsx0U-A z{`+do(X_6iZN8OZup1}Bw-FA@o#?k+S+JiOuE!e!wO?6>lul|OtI z$P+#E^A>2NG@k}|{2;s#96;nG@M#0-m}mlx&c)M*`SyAzAWFRvQpN0^?~Zi8bopF+TtB-e zxyK>%vcxmX`-yLd?+!8$DdKwRpgaSzU^vr|4dcE?)QDDf2O^7MIo8n()$G$8&^*EC zSDKH3jb3v_odkPwAF@Ix`6y7xVD20%W5(Jrn~^_GLC*ZP+)ieYr)wp7#M8(F#iL)T z!hPXuVW3bKvQ#gW#LRt&HWWuDfVSU&UU@MHy2CC$Km@Ub)EzmkQ__6o?>urT*d||7 zOQ~SmMXd*oY(~DYE_06=1{t!R9Shpt!Dh2Y?v*(zl3c#5_6Ib3f)b=`M=lli<5t)U zonR|GU~jQo*k+Kw>4>m*VNA?fNXj@moIZsLggMB6wudbq0dMaqa)qS6VgYjfJ(1~b zgDoDpavh|SP9_xu`N({FA+H^Xw#L9ms*LkZV0n-{dO!4$)Kx4%E_oAZ=^!-KHRL~^ zq85kbzgh4vse>{V)tQjBhc&R(1qac zLKFPT-`Np);u=N0s9jWbP#^uN4G*{paA|_8x}bOMaaDKhcL!Re27vT(3VxGc zq}SbHkqp3d2IILM@!1|vZjYlK@!6(mYlQv!cwQ4cyGhZwYZSegRFe=4s^MtmqRN39 z_$C|dB>b+6GXx*fNNs_FR{f&yMmXCZtqp!PLXX>^FRjqXcR%!d2-<)mTuCiSpQ6?g z=g7#EC&1FeTk4}f)v%F$^4`it>p?4`>O`r`AuDZ39RaD&B6SgAIA5`77D;uMQZq~H zcE}3E+SpnZjfgynRLPNwGg1lUugE3UJsxDvNnM0ePpnk5q#!Sz0(v2pEu=m%2XUJ` zT$h7JDsz=aYIH~~7U>D0C;!@0LRF+r(7$4v{67a$m8w*)n!tyo%8wlJ*S4~_pH!}s z>T9LWnpAX>yGZ>RS))zrCrO1Bsm&riRT`jt)`Xs?Pm{V`_mDaJ@@97yFBc^;WfQd9E^I4U!$jLTnXP5MVrCWv7e z(D4LmLM2M2KdGN9u1zGhW~Ba1sR;B}qA8Vw{tCPIzX@VWZK|@eM5$>cpAxDI{d4Z8 zQc_(>C{204auw;7f8CdIH#tI4q`ykdD|rK1!9!*>t%yR&o&`sg^)d?eDtEp6sjN;= zIE#ELd;U5|gm?^)Mr<3(`|1m1$i3%c0 zt?m1$D4eLQyijJ2%%1XF!km+pzW)CsLr@`HoPtKUfKVZ!=sciAR+bUEBv_F<$TJ8! z1PA3$LEV4cf0R65c^au+CRD9R2tOX=e$pGlF@8QF{q%3jQ;VNQ=!|eLpBk(y6TJ>w_Qc|z@j7ouRTpnKL zFnR}lj(!BIV-ma-2D$y!>`O%c^xRwIs+%F-S5dPFk@#O!N7)p)mGHZpb#}84vafYK za&f*}(t6s%UO{ZME?1W6NU>5$VWY1O;)=V(r}AZFUK3%R#4-=$aO5#HUL*4GN8H`L zue^~$RrwOC$T}c@eU6Sm*0qLw2z5;G5z%)$)srzZ+349idMgzn7fN4C8;}X!E$xCW zLO~06gzaTPmDW=E6snHe(c78DYz4Llo5=2F66xEB8=MrEishvwc_+rZjjjrh*~mmP zJDKWqHBjL?#1>x54e2qcS)9)N0LwdpE=4^-UBpaZ4cKnKA)0<2*3b*obuE@BB7^=0 ze%MNQWZlG>Vh5br04pjK*3x~%$tob$Iuf?sF<(icoj3t?UFTuD>*RCNdc@Ic!;Y$t znvl<>iSit3HSEn;R5(l1GY2U5SHf=rZzPKwUD*w&)~<((C22+ zE9GFsgC$6Bssm#CiKsJ~f|{Mth?Jb5o?;Gn$(iDG^d(f@OU;&eM4swW+u(&>kuy*y zwpJQ0^rMzBkL7l#$($l5hyh|x^k_0{y#?|{@r`haX{R=F0+S*?^p=pC(Gq;h-l&k9 zO}9kd-Y7bPvY}FL9CeTBOP`mcnVnQMcp5$7Q$|x?!a}`AwCnIYaq z_B)r87pVM%|`FX}laF-GwdR1Qc?Fy2D(hS|DoJj-8Jf-3d@a7_~s&MO_2O4FT=d zquuafW>7!TF&O`cQVO_aA1XaQqvy+4DGD*pL#Q4d4?RKZSNpS5nP~XxvoJH8Kw168 zgYq$PAu_hHl#_4H=wOLvqKXcYS}Bgcf!M-ox-Qk5?m$mNjlpNMm)a}^BAPo6bL68h z$x*08Y>BGIUr~=Y1-;u1A0ZVrdV8tk@Wofk`!MQ?Ol__omBv<(uZgeZe1W14qUKP; zl%RiSa=7tKQ)VF*BnCk@??g1B1ih76FD3Y@(m%74sD?mvrraADjd(c)wS2oErR<1C zPp6W>LkGcE$)P zg@=+EytV?|a7MaJH)V#1N2R0eMY@;dkvq@}*zJ^8s7u|VyHMq&v*Hfat>s~CpW^wu znTMc-WAIMPGI6-_1XWVFjCngO?-%RP;~<4m^NV=d?}(ZW2k+Yvzj#L7z?(hj?R3ci z*&~5+;^@bU{_^-^BytoWmJ7SV+xLKUe8yj)VLxB*h+F`jTn8YneK z>|rlFo>EjhaOySrrT7yvMP8~L=3p~Do)RE!ra+eKsM69rafnPYtJw$g25{diP|^?w3oyqlADZ5*~YY!?u;mxUf3`9W)D)4 z%t5NRxLlNx$9#m?X^y;=xsP5Agr~WbB2}}45$jKqmy4OcJJM9%!qfTLL3Gjq{^g=m_D$Rb28K?@RMQJ`{RU1?Zo}wOu&nL>e#4X_S%JK>5 zmMdag)Qh&EMxwIRBUVN}HdU1tWgJ};bZbFuM0Cfscywmb1F$viw zQfa*qIZDuj?^o#vp1lEn&sq?O_zxd~+K6?rnEOsl9crkz|48vdzhmT!n^ z)F#gY6;8(}+ryu{D_R9sng!Xkn*I{A^?_UhJp3X2^YKJy%Hec6QyCi6EtQu9=4Nkz-A-OC9uf}25;;JfX1Y+h(htHqa8Vv|53nETuC$%nfmm8Pq7HvR?}SV3 z=>Dj|nMmJbCesI`j<6ZJ0^>Ev$_Jpbe-~8=(dTQ3pJj{fkrBK>kARkmg48+z8TvEw z${px1L{DA=8J1}&Qu0-dx*U2s4*vKCsYtUSJP2vBs)CE2*kEq%BvvD 
zj{u1>sFNHEYsLfKFrgm23+j}=pm!iHLDsuWLXGkctas=Q&NomOq$S|z^7K@A`qQZo zU@2UIc8x_$KM412#q&7+1sD8#w!1V)5+e z7~^9+{~)fOD&M5`W40gBq}Dx_DwK$iHUJ~23~t{Jq*^0N)d6~#tUu8q<`j=n)`xY_ z7t(G3q-zL9oC0k&7u9PdGu=)ufq2&l@gVN*B9FoB2|$_HtxuqnwjwjJ4_tH(a%CbS zm?L0UeNP>No{VPJApX~e8jtlC9Z_H36XRPhM#%jz-$T&*a&mRaj;K)+ShG*%bM#|a zD!1i)DFE5XtJG8=UqwRnghYriD<=>+T8jM*aszPhL-aiwPw$3zZUs)Oq<)BRHkArs z!Muj%Ie|XTkY#EyGyzSUs9uOSH<#_=5MXs0a(}tlkWP_XNWGz7MxZB0>2uOdxel$y z+;@Y8J|2jaLGJssNWA=outs1D>+9&QN}IfcspDcAAZ-Dr{@U?fh5P{rgE1@Iu``v{s;RK}YD$L;}RNGg8Wgv*n@XkDWDr_|! z^&|L+#Y~Py92IL5r1Q`vA0wwuR;iSOmfnjM7HVHlaWFJUNBJA9p=rj}S9j!>s&6Bk zeotFL`<3pFW;r6|J$NT9=^3CHT52HZ#Lp2=j2G95`-D)R9XS$@*ao^_E~-BlBJ$i3 zl5DE%5j#Wor9y6ad}XCI;zi$naVyUMF1$qS(+f@X0pg(9zGdD6-Y}seD2D_4=0qYo6KPdq5~Dz%Y5hP-bdP+=Q9--EbflDrhUqAc?ad^U>i3+>pK9*k@R z$#jk68fdB^#~81juXSr$XgcaQX$z4taB%DRvYZU*v|0L5xQjem6J#7~VPutj3&j_R z_1}kWcnELL5}qOIeav@R2u8ibRMZ-zdzX9KB4<6^+ebKtJ7)=Y=&1^*V_7MFCUzBE z-iE$jLLXrd)-I%i8*78UoRsmj5Gx{M0 z$G8G3#fXe34_m&S=oRV!(K676E07Vq2B~ruYg!^8v%`Ro2KMiC$_ziUBP9KORLu5; zjs2W%#0)^)Z8~&d7_7?a@ZCmW#z-V$10<;pcGLw(m0Ted`W&mBU`ySg-!PF}BQA_9 z#l2?BaTO8eKE*ZR_aj5JpZB1>K}~KCdJZh`x4x0yd8o`-?&*$E4D(GF%;G9Mv2+o| zEfTKyCV00YPxFmuFtYIDgdM1T(xbZL3Ot;Td~4k8ohR)BZM&?)ud5 zoeRsG^9l}`H&|u&5Gqyk+PFQkO38tVITgBBsFgUOmxan$HS-YOiiOr9 zl7EK_;+vrM?h?jIt9Gf+U`4@WwMqS@>MmcKyMfBRvY5M5(lE@+3CQIiP>(bpncwc5 z7xfXBAOYt?<1Irj#)V3ho!)Y&TRHA|;JMd{*k4m3Y4 zC{yrde%<_8`OONp6dcGun-`R~Cik11d0G3@H>Ms+S@(AJTRJ5&ZC+NHLY;52?%TNF z8mUbhv~Aw0bk_!*AGV}x^^5;Cz$1>d3E6TA`{wN1MwwapUpNAV5#VH*smb-`OQ;^8 z)@~*5)=UiiC8S;WuJA9zOrdP>N&O~ntx(_yu{19R&YaB;l9+Hp2>FOKUF2GUZ~2e zy7ARGnORSdrWO}vE;3NfbrmbHz5{+c#AKlnFxu)}=egjHcXxKpa(>~6!b+4F`!~q< zR=0g*3%4D$+%7Z~t~D+%vt9}>RempzskLw zb340Z_D5M+8RgQM)QTxVnrVU$;%Gm>Xg{lkXQZ%@Ii-m=3=gUwblwmbFhQ5AX`>EQ?dJ>l zZ&jbFI`RT@TOKKddippc?3vbJRBmUxe)esbUobWJ&yj14FAK`1IN)4V~s^(Q-4SDS4&evO$;@(Zv@bq?KIG-Hl##h59n+-{RY!f*z@9<%jQdgP*gWWLpev}V zX>G_2)AvCRT`)gJ3UwVaZ^~Ym{$*PEjI%kPTeP0()MZt@fMLdCrbnhy!5M+a17h@P zsCrwc9PCp?Je1I&N5}6>c`SE1YkooJMc5G>ToSo}fFSAD~~L4NxzJSF+UC z+x5bxvg|4NCHHbxPL;kms4MIuLiyDoNCBkTo7k-x~fPj%{o(uQj6yVi<~Jy)k&v)ev)!7Na`#2KB<{H>X;N~j%$9}K++*dMTpg^oILvp{E(=;1 z{!xkDaX|@VOY6$cOt=;Cq3)(I&eABe$7?=m!?Wxc)l%F!-`bPJnf#Bs5r%q!nxIQT z3C274tQp)QL=Kr6Y}F_7r^N_o$AX@jd}_6nCu!bnm8FO0Cz{uO5~wq642=#i7j6%o z9CGn&qUUzoB(M}|!gscNhj zP(`zittkz6FR=E??~oOhS~2-p(ynK_pPqVpaDD)D>pnS-0|#9>8ID8lz#3_u9VTEpqz82ZzG+rs;#Z6 zUddMCZ2^-a=Ok1vd!lS=nGvNY#ugX{Q!$0-Q|r7q{v_#1+zVZ5VqR-U1F3~-dBEtP zMyOB<4%}jV5h8}22(J`THX<@?dr)QdAhC|km3<~fNS^$~S)eH$Yssb*me_~O?>8<2=Xs(x2C+%u-UeeNMji0`La`ow!q;Fo2$;iqbRdC9SvCku}p>+HMxpJhIhTPnLw*4NpZ{0i26t}9|m z?o-__K_8jYOeca5Vm-zN^=105FWB|DZC&A%f{eTm@-(?Kvg>69W}VBLl)X7;S?-tl zZ_K~Ko_LRm)vm5Nx~Sy{Fw!`cRW5o#NVjRR3DdIn@`IyBU1L(=}&b zh9>)qoO*eO@=q3Y${(Klef~tN&GiJUQ4-bd!Yh|+P-8&N7FAxC{VkRbsmG6V&dj-z z+U;%Al;-JevPb7f73NzQd#KgR#k*Ts^!e_b zmpL=@V$C+oZrd~aZD$i-PsXDyFq*=jL^X}+6SFaDV8|x@aW++mb#smk+g#f%TbO;S zJ=HPL73G=attL36=1gyXDC)r1au=BI5eca0o9<3`MxYk{H;36?$2PO@VSYefpWIP7 zwyY9ai?W_)mCW*_holWlot>7H*(~pnrKj@^^(bO--GHt)yS!}HrFN$pEvm%DmeLG& zPoBL^VR)V?TS~8(GBUYoa<8-t=6vB-jU)I~#B4td*>D&$058eIQy3q^jH}AH+OPQu$)#zCDT*cNU9I6%0 zZdfsR32Vhd3-$RwrKP@D_Ppks6Zv88VRA!tcvy?X^VOHs7+s-Tyg7WD(V)M{&6KU) zHqJ;(%e*I9C(>WP9gzG(a*cFnp;L_4wht~FVJVRxml%JsM4RyC#>e{2ngD(XHNoq% z1r#J_txT_-9+k7&mL&eJYGjyax)gpavTVdg)2P5%`jWb9sP0>wM*1jqLNS z2J12Fe0#k6rZ9u5$>e$s?B`Lx_PJCQQ4F&fC>-~+aBZ+Rx3@g!$BLSHzOJs(_OsU7 z)^?~6{laE(G4ecqw6>Ifb3px|?!k+VVTJ|TJFvMvVaF_HwIaw95Fe|BX$*Eolx&cC%|j=hh)jw91~#nVTaj;z9Fp|NL%-C58i zyJtF`67_mv@|xsruUEf)m=>N@HSc4~D90ipQ9UQzRUxav_buzRxZY@L?W5)QMS0W* 
zNdLovy1{FqP-Oqk9hA4|xA>Y*Wyr_x>&i>dmz(knwN1XvRQ`(6|8x2ITf7U6}9gl|5NRxjqio1WGOi7bZq3BO&T+_k;V*VnlzN1S^(8=iURdWbS%6FJBQL& zd>`cr`re9sVZp4PcfY1mtILmlrU+JK6h3;CF$6enath`nM={`AbrAm3r|HblNPzl(st4qqPELmRF?PCIu5xq< zW6rC%UB~?`ynhSz{5fiUPWdgPyq4(E!%#k?-iKOMMc+;Jf_E5hh~J4WKN|XF8ZAba zmy35&Cr@=cBEBcAsM&NDsXo)*l-sD6qJ*o{xV1pE$-kcOWnyuz>f#)Q<9|5La}drEWkDKuku$Omd&}?&f99gf zU!!BAevOWbFFBQ(T-Md)rJy`RW#cm-m7$`feE#8E0ObijpW{P_9zX7&u_%iSMiHmc zBY)=TRZ>L$r+nop|e3F0zUWfW4?+UMBhoVqsUSzDryh?GF$$Qq>z+?b51v! z2asp`Ej%Nohr4*~32vmf@d*1-exxi(&epHWb0q2O=HOM*U!r`HQl1%|*w=}DsB~0k z_zc7AK~e9-kI(qh^W&ZL^1Uh)qsS4SdS~j5NpFh&(|f%35`CIB-XESu*}U&I)o=Pe z8YPhm6uHuqE?0#@7pWD}@jSU7?evIz{V2m#jDq-#PV*OX zA*WFV|A-VkX^cl>3BKj>S(7pg9SJ|Kqn?9~ky;~-@+gnex6skiH^<^RKX#$-;bWk~ zdo)9#Sqqhj&leRDr`S{ZXpTqUL%$PUo((9E^Vz)Oo-%rEY`9FU5 z??<)q`&23#*U@;6V#$x{X!K2^JkkS=DnH}Ydr-ckyhlgOXFfU-8hKYQ-;a71Iy#Cz zy+i#DomI--G)Cilay~ax+~_<~`RKP|vHcw>dPXIKGa~M3{7-Y)+U37Oy(4{!uj|~u z^f5F>q6qQ#^oVLd#g-yWH>!1X&MI<0jWekCpz$aj50#lOxeWb0UoMIV9~a8&{O|t# zs4l*X`Urk*LHz+mi(*+(8|br9YKe{p#jTq7Rn$7}W9*Q47 zW9Hii)ndMvr|}TQk1sQwmspe}y1X>>h;IBnjX^5rPqYVrUg@Z)r=|SB_t6v`exumY zM*S4MN@F}eew1JMzL!6fR5Qr?l|LT7PSAZt{PU*tcvU%vkR>-bSi{rB4M|Jm{5&nvaE|8@QQ@Ay%BrSnd?iEqFEf5h(}vnZec z{=5EBoBPk!zyFTkzu*7<@5C<-b)Lq$G_L;V{+D{ViunjX7vk?Jlha6s{`h$x|ET`_ for the device:: + + gactions test --action_package blink.json --project + +- Try a custom device action query like "Blink 5 times". + +- Run in verbose mode to see the gRPC communication with the Google Assistant API:: + + python -m pushtotalk -v + - Send a pre-recorded request to the Assistant:: python -m pushtotalk -i in.wav diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/actions/blink.json b/google-assistant-sdk/googlesamples/assistant/grpc/actions/blink.json new file mode 100644 index 0000000..43a9eca --- /dev/null +++ b/google-assistant-sdk/googlesamples/assistant/grpc/actions/blink.json @@ -0,0 +1,31 @@ +{ + "manifest": { + "displayName": "Blinky light", + "invocationName": "Blinky light", + "category": "PRODUCTIVITY" + }, + "actions": [{ + "name": "actions.devices", + "config": { + "@type": "type.googleapis.com/google.actions.v2.devices.DeviceControl", + "commands": [{ + "intent": { + "name": "BLINK", + "parameters": [{ + "name": "number", + "type": "SchemaOrg_Number" + }], + "trigger": { + "queryPatterns": [ + "Blink $SchemaOrg_Number:number times" + ] + } + }, + "directResponseFulfillment": { + "ttsPattern": "Blinking the light $SchemaOrg_Number:number times" + }, + "requiredTraits": ["Blink"] + }] + } + }] +} diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/audio_helpers.py b/google-assistant-sdk/googlesamples/assistant/grpc/audio_helpers.py index b8ce334..cd6d5de 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/audio_helpers.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/audio_helpers.py @@ -295,7 +295,6 @@ def volume_percentage(self): @volume_percentage.setter def volume_percentage(self, new_volume_percentage): - logging.info('Volume set to %s%%', new_volume_percentage) self._volume_percentage = new_volume_percentage def read(self, size): diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py b/google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py new file mode 100644 index 0000000..a8a9d72 --- /dev/null +++ b/google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py @@ -0,0 +1,99 @@ +# Copyright (C) 2017 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helper functions for the Device Actions.""" + +import concurrent.futures +import logging +import sys + + +key_inputs_ = 'inputs' +key_intent_ = 'intent' +key_payload_ = 'payload' +key_commands_ = 'commands' +key_id_ = 'id' + + +class DeviceRequestHandler(object): + """Asynchronous dispatcher for Device actions commands. + + Dispatch commands to the given device handlers. + + Args: + device_id: device id to match command against + handlers: map of command name to callable. + + Example: + # Pass key value params to register handler for intent at creation time. + DeviceRequestHandler('my-device', INTENT_NAME=handler) + + # Use as as decorator to register handler. + device_handler = DeviceRequestHandler('my-device') + @device_handler.command('INTENT_NAME') + def handler(param): + pass + """ + + def __init__(self, device_id, **handlers): + self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) + self.device_id = device_id + self.handlers = handlers + + def __call__(self, device_request): + """Handle incoming device request. + + Returns: List of concurrent.futures for each command execution. + """ + fs = [] + if key_inputs_ in device_request: + for input in device_request[key_inputs_]: + if input[key_intent_] == 'action.devices.EXECUTE': + for command in input[key_payload_][key_commands_]: + fs.extend(self.submit_commands(**command)) + return fs + + def command(self, intent): + """Register a device action handlers.""" + def decorator(fn): + self.handlers[intent] = fn + return decorator + + def submit_commands(self, devices, execution): + """Submit device command executions.""" + fs = [] + for device in devices: + if device[key_id_] == self.device_id: + for command in execution: + f = self.executor.submit( + self.dispatch_command, **command + ) + fs.append(f) + else: + logging.warning('Ignoring command for unknown device: %s' + % device[key_id_]) + return fs + + def dispatch_command(self, command, params=None): + """Dispatch device commands to the appropriate handler.""" + try: + if command in self.handlers: + self.handlers[command](**params) + else: + logging.warning('Unsupported command: %s: %s', + command, params) + except Exception as e: + logging.warning('Error during command execution', + exc_info=sys.exc_info()) + raise e diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py b/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py index 204abf8..40226f2 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py @@ -12,11 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Sample that implements gRPC client for Google Assistant API.""" +"""Sample that implements a gRPC client for the Google Assistant API.""" +import concurrent.futures import json import logging import os.path +import time import click import grpc @@ -31,32 +33,39 @@ try: from . 
import ( assistant_helpers, - audio_helpers + audio_helpers, + device_helpers ) except SystemError: import assistant_helpers import audio_helpers + import device_helpers ASSISTANT_API_ENDPOINT = 'embeddedassistant.googleapis.com' END_OF_UTTERANCE = embedded_assistant_pb2.ConverseResponse.END_OF_UTTERANCE DIALOG_FOLLOW_ON = embedded_assistant_pb2.ConverseResult.DIALOG_FOLLOW_ON CLOSE_MICROPHONE = embedded_assistant_pb2.ConverseResult.CLOSE_MICROPHONE +DEFAULT_DEVICE_ID = 'googlesamples-pushtotalk-device' DEFAULT_GRPC_DEADLINE = 60 * 3 + 5 class SampleAssistant(object): - """Sample Assistant that supports follow-on conversations. + """Sample Assistant that supports conversations and device actions. Args: + device_id: identifier of the device. conversation_stream(ConversationStream): audio stream for recording query and playing back assistant answer. channel: authorized gRPC channel for connection to the Google Assistant API. deadline_sec: gRPC deadline in seconds for Google Assistant API call. + device_handler: callback for device actions. """ - def __init__(self, conversation_stream, channel, deadline_sec): + def __init__(self, device_id, conversation_stream, channel, deadline_sec, + device_handler): + self.device_id = device_id self.conversation_stream = conversation_stream # Opaque blob provided in ConverseResponse that, @@ -71,6 +80,8 @@ def __init__(self, conversation_stream, channel, deadline_sec): self.assistant = embedded_assistant_pb2.EmbeddedAssistantStub(channel) self.deadline = deadline_sec + self.device_handler = device_handler + def __enter__(self): return self @@ -94,6 +105,7 @@ def converse(self): Returns: True if conversation should continue. """ continue_conversation = False + device_actions_futures = [] self.conversation_stream.start_recording() logging.info('Recording audio request.') @@ -129,6 +141,8 @@ def iter_converse_requests(): if resp.result.conversation_state: self.conversation_state = resp.result.conversation_state if resp.result.volume_percentage != 0: + logging.info('Setting volume to %s%%', + resp.result.volume_percentage) self.conversation_stream.volume_percentage = ( resp.result.volume_percentage ) @@ -137,6 +151,18 @@ def iter_converse_requests(): logging.info('Expecting follow-on query from user.') elif resp.result.microphone_mode == CLOSE_MICROPHONE: continue_conversation = False + if resp.device_action.device_request_json: + device_request = json.loads( + resp.device_action.device_request_json + ) + fs = self.device_handler(device_request) + if fs: + device_actions_futures.extend(fs) + + if len(device_actions_futures): + logging.info('Waiting for device executions to complete.') + concurrent.futures.wait(device_actions_futures) + logging.info('Finished playing assistant response.') self.conversation_stream.stop_playback() return continue_conversation @@ -161,7 +187,10 @@ def gen_converse_requests(self): sample_rate_hertz=self.conversation_stream.sample_rate, volume_percentage=self.conversation_stream.volume_percentage, ), - converse_state=converse_state + converse_state=converse_state, + device_config=embedded_assistant_pb2.DeviceConfig( + device_id=self.device_id, + ) ) # The first ConverseRequest must contain the ConverseConfig # and no audio data. 
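For reference, the decoded device_request_json payload handled above is a plain JSON object. The sketch below shows the shape that device_helpers.DeviceRequestHandler dispatches on, mirroring the requests built in the accompanying tests; the request id, device identifier, command name and parameter values are placeholders, not captured API output:

    # Illustrative device request as dispatched by DeviceRequestHandler.__call__;
    # every value below is a placeholder.
    device_request = {
        'requestId': '42',
        'inputs': [{
            'intent': 'action.devices.EXECUTE',
            'payload': {
                'commands': [{
                    'devices': [{'id': 'my-device-identifier'}],
                    'execution': [{
                        'command': 'BLINK',
                        'params': {'number': 3}
                    }]
                }]
            }
        }]
    }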
@@ -180,6 +209,9 @@ def gen_converse_requests(self): default=os.path.join(click.get_app_dir('google-oauthlib-tool'), 'credentials.json'), help='Path to read OAuth2 credentials.') +@click.option('--device-id', default=DEFAULT_DEVICE_ID, + metavar='', show_default=True, + help='Unique device instance identifier.') @click.option('--verbose', '-v', is_flag=True, default=False, help='Verbose logging.') @click.option('--input-audio-file', '-i', @@ -217,7 +249,7 @@ def gen_converse_requests(self): help='gRPC deadline in seconds') @click.option('--once', default=False, is_flag=True, help='Force termination after a single conversation.') -def main(api_endpoint, credentials, verbose, +def main(api_endpoint, credentials, device_id, verbose, input_audio_file, output_audio_file, audio_sample_rate, audio_sample_width, audio_iter_size, audio_block_size, audio_flush_size, @@ -298,8 +330,18 @@ def main(api_endpoint, credentials, verbose, sample_width=audio_sample_width, ) - with SampleAssistant(conversation_stream, - grpc_channel, grpc_deadline) as assistant: + device_handler = device_helpers.DeviceRequestHandler(device_id) + + @device_handler.command('BLINK') + def blink(number): + logging.info('Blinking device %s times.' % number) + for i in range(int(number)): + logging.info('Device is blinking.') + time.sleep(1) + + with SampleAssistant(device_id, conversation_stream, + grpc_channel, grpc_deadline, + device_handler) as assistant: # If file arguments are supplied: # exit after the first turn of the conversation. if input_audio_file or output_audio_file: diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt b/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt index a5e817b..af5964d 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt +++ b/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt @@ -4,3 +4,4 @@ urllib3[secure]==1.21.1 sounddevice==0.3.7 click==6.7 tenacity==4.1.0 +futures==3.1.1 diff --git a/google-assistant-sdk/tests/test_device_helpers.py b/google-assistant-sdk/tests/test_device_helpers.py new file mode 100644 index 0000000..49b48ff --- /dev/null +++ b/google-assistant-sdk/tests/test_device_helpers.py @@ -0,0 +1,115 @@ +#!/usr/bin/python +# Copyright (C) 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest +import concurrent.futures + +from googlesamples.assistant.grpc import device_helpers + + +def build_device_request(device_id, command, arg): + return { + 'inputs': [{ + 'intent': 'action.devices.EXECUTE', + 'payload': { + 'commands': [{ + 'devices': [ + {'id': device_id} + ], + 'execution': [{ + 'command': command, + 'params': {'arg': arg} + }] + }] + } + }], + 'requestId': '42' + } + + +class DeviceRequestHandlerTest(unittest.TestCase): + def setUp(self): + self.handler_called = False + + def handler(self, arg): + self.handler_called = arg + + def test_success(self): + device_handler = device_helpers.DeviceRequestHandler( + 'some-device', + SOME_COMMAND=self.handler + ) + device_request = build_device_request('some-device', + 'SOME_COMMAND', + 'some-arg') + fs = device_handler(device_request) + self.assertEqual(len(fs), 1) + concurrent.futures.wait(fs) + self.assertEqual(self.handler_called, 'some-arg') + + def test_different_device(self): + device_handler = device_helpers.DeviceRequestHandler( + 'some-device', + SOME_COMMAND=self.handler + ) + device_request = build_device_request('other-device', + 'SOME_COMMAND', + 'some-arg') + fs = device_handler(device_request) + self.assertEqual(len(fs), 0) + self.assertFalse(self.handler_called) + + def test_unknown_command(self): + device_handler = device_helpers.DeviceRequestHandler( + 'some-device', + SOME_COMMAND=self.handler + ) + device_request = build_device_request('some-device', + 'OTHER_COMMAND', + 'some-arg') + fs = device_handler(device_request) + self.assertEqual(len(fs), 1) + self.assertFalse(self.handler_called) + + def test_exception(self): + err = Exception('some error') + + def failing_command(arg): + raise err + device_handler = device_helpers.DeviceRequestHandler( + 'some-device', + FAILING_COMMAND=failing_command + ) + device_request = build_device_request('some-device', + 'FAILING_COMMAND', + 'some-arg') + fs = device_handler(device_request) + self.assertEqual(len(fs), 1) + concurrent.futures.wait(fs) + self.assertEqual(fs[0].exception(), err) + + def test_decorator(self): + device_handler = device_helpers.DeviceRequestHandler('some-device') + + @device_handler.command('SOME_COMMAND') + def handler(arg): + self.handler_called = arg + device_request = build_device_request('some-device', + 'SOME_COMMAND', + 'some-arg') + fs = device_handler(device_request) + self.assertEqual(len(fs), 1) + concurrent.futures.wait(fs) + self.assertEqual(self.handler_called, 'some-arg') From 8699212da0ceacb68d6006de7afa853a3da422bd Mon Sep 17 00:00:00 2001 From: Johan Euphrosine Date: Thu, 17 Aug 2017 14:54:53 +0900 Subject: [PATCH 02/35] google-assistant-sdk: add device registration sample Usage: googlesamples-assistant-devicetool register --model 'my-model' --type LIGHT --trait Blink ... --device 'my-device' googlesamples-assistant-devicetool register-model --model 'my-model' --type LIGHT --trait Blink ... googlesamples-assistant-devicetool register-device --device 'my-device' --model 'my-model' ... googlesamples-assistant-devicetool get --model my-model googlesamples-assistant-devicetool get --device my-device googlesamples-assistant-devicetool list --model googlesamples-assistant-devicetool list --device Note: This reuses google-oauthlib-tool credentials. If the device model already exists it will update it. If the device instance already exists it will delete and re-create it. 
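Under the hood the tool issues plain REST calls against the device registration API using the saved google-oauthlib-tool credentials. A minimal sketch of the equivalent call, reusing the same credential loading as devicetool.py below; the project id is a placeholder:

    # Minimal sketch of the REST call behind 'devicetool list --model'.
    # 'my-project-id' is a placeholder for the Google Developer Project id.
    import json
    import os

    import click
    import google.oauth2.credentials
    import google.auth.transport.requests

    creds = os.path.join(click.get_app_dir('google-oauthlib-tool'), 'credentials.json')
    with open(creds, 'r') as f:
        c = google.oauth2.credentials.Credentials(token=None, **json.load(f))
    c.refresh(google.auth.transport.requests.Request())

    session = google.auth.transport.requests.AuthorizedSession(c)
    api_url = 'https://embeddedassistant.googleapis.com/v1alpha2/projects/my-project-id'
    print(session.get(api_url + '/deviceModels').text)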
Bug: 63617472 Bug: 64960008 Change-Id: Ibeb6a535c981633669169ba3659a4d6d77b85d0f --- google-assistant-sdk/README.rst | 69 +++++- .../googlesamples/assistant/grpc/README.rst | 26 ++- .../assistant/grpc/devicetool.py | 207 ++++++++++++++++++ .../assistant/grpc/requirements.txt | 2 +- google-assistant-sdk/setup.py | 2 + 5 files changed, 290 insertions(+), 16 deletions(-) create mode 100644 google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py diff --git a/google-assistant-sdk/README.rst b/google-assistant-sdk/README.rst index 1d59b80..0ca8997 100644 --- a/google-assistant-sdk/README.rst +++ b/google-assistant-sdk/README.rst @@ -20,12 +20,14 @@ google-oauthlib-tool This tool creates test credentials to authorize devices to call the Google Assistant API when prototyping. -- `Follow the steps `_ to configure a Google API Console Project and a Google account to use with the Google Assistant SDK. +- `Follow the steps `_ to configure a Google API Console Project and a Google account to use with the Google Assistant SDK. -- Download the ``client_secret_XXXXX.json`` file from the `Google API Console Project credentials section `_ and generate credentials:: +- Download the ``client_secret_XXXXX.json`` file from the `Google API Console Project credentials section `_ in the current directory. + +- Generate credentials using ``google-oauth-tool``:: pip install --upgrade google-auth-oauthlib[tool] - google-oauthlib-tool --client-secrets path/to/client_secret_XXXXX.json --scope https://www.googleapis.com/auth/assistant-sdk-prototype --save --headless + google-oauthlib-tool --client-secrets client_secret_XXXXX.json --scope https://www.googleapis.com/auth/assistant-sdk-prototype --save --headless googlesamples-assistant-audiotest ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -48,21 +50,54 @@ This tool verifies device setup for audio recording and playback. The same ``--audio-block-size`` and ``--audio-flush-size`` options can be used on the ``gRPC`` samples included in the SDK. -googlesamples-assistant-pushtotalk +googlesamples-assistant-devicetool ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -This reference sample implements a simple but functional client for the `Google Assistant gRPC API`_. +This tool allows you to register Google Assistant device models and +instances and associate them with Device Actions traits. - Install the sample's dependencies:: sudo apt-get install portaudio19-dev libffi-dev libssl-dev pip install --upgrade google-assistant-sdk[samples] -- Run the push to talk sample. The sample records a voice query after a key press and plays back the Google Assistant's answer:: +- Show the CLI tool usage:: - googlesamples-assistant-pushtotalk + googlesamples-assistant-devicetool --help + +- Register a new device model and new device instance:: + + googlesamples-assistant-devicetool register --model my-model \ + --type LIGHT --trait Blink \ + --manufacturer 'Assistant SDK developer' \ + --product-name 'Assistant SDK sample' \ + --description 'Assistant SDK sample device' \ + --device my-device \ + --nickname 'My Assistant Device' + +- Register or update the device model with the supported traits:: -- Try some Google Assistant voice query like "What time is it?". 
+ googlesamples-assistant-devicetool register-model --model my-model \ + --type LIGHT --trait Blink \ + --manufacturer 'Assistant SDK developer' \ + --product-name 'Assistant SDK sample' \ + --description 'Assistant SDK sample device' + +- Register or update the device instance using the device model:: + + googlesamples-assistant-devicetool register-device --device my-device \ + --model my-model \ + --nickname 'My Assistant Device' + +- Verify that the device model and instance have been registered correctly:: + + googlesamples-assistant-devicetool get --model my-model + googlesamples-assistant-devicetool get --device my-device + +- List all device models and instances:: + + googlesamples-assistant-devicetool list --model + googlesamples-assistant-devicetool list --device - Get the `gactions`_ CLI tool. @@ -104,7 +139,23 @@ This reference sample implements a simple but functional client for the `Google gactions test --action_package blink.json --project -- Try a custom device action query like "Blink 5 times". +googlesamples-assistant-pushtotalk +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This reference sample implements a simple but functional client for the `Google Assistant gRPC API`_. + +- Install the sample's dependencies:: + + sudo apt-get install portaudio19-dev libffi-dev libssl-dev + pip install --upgrade google-assistant-sdk[samples] + +- Run the push to talk sample. The sample records a voice query after a key press and plays back the Google Assistant's answer:: + + googlesamples-assistant-pushtotalk + +- Try some Google Assistant voice query like "What time is it?" or "Who am I?". + +- Try a custom device action query supported by the device like "Blink 5 times". - Run in verbose mode to see the gRPC communication with the Google Assistant API:: diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/README.rst b/google-assistant-sdk/googlesamples/assistant/grpc/README.rst index c350d10..fa7c476 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/README.rst +++ b/google-assistant-sdk/googlesamples/assistant/grpc/README.rst @@ -41,16 +41,17 @@ Setup Authorization ------------- -- Follow `the steps to configure the project and the Google account `_. +- Follow `the steps to configure the project and the Google account `_. +- Download the ``client_secret_XXXXX.json`` file from the `Google API Console Project credentials section `_ in the current directory. 
-- Download the ``client_secret_XXXXX.json`` file from the `Google API Console Project credentials section `_ and generate credentials using ``google-oauth-tool``:: +- Generate credentials using ``google-oauth-tool``:: pip install --upgrade google-auth-oauthlib[tool] - google-oauthlib-tool --client-secrets path/to/client_secret_XXXXX.json --scope https://www.googleapis.com/auth/assistant-sdk-prototype --save --headless + google-oauthlib-tool --client-secrets client_secret_XXXXX.json --scope https://www.googleapis.com/auth/assistant-sdk-prototype --save --headless -Run the sample --------------- +Run the samples +--------------- - Install the sample dependencies:: @@ -62,11 +63,24 @@ Run the sample # Record a 5 sec sample and play it back python -m audio_helpers +- Register the `Action Package`_ describing the Device Actions traits that the device implements:: + + gactions test --action_package blink.json --project + +- Register or update the device model and device instance with the supported traits:: + + python -m devicetool register --type LIGHT --trait Blink \ + --manufacturer 'Assistant SDK developer' \ + --product-name 'Assistant SDK sample' \ + --description 'Assistant SDK sample device' \ + --device my-device \ + --nickname 'My Assistant Device' + - Run the push to talk sample. The sample records a voice query after a key press and plays back the Google Assistant's answer:: python -m pushtotalk -- Try some Google Assistant voice query like "What time is it?". +- Try some Google Assistant voice query like "What time is it?" or "Who am I?". - Register the `Action package `_ for the device:: diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py new file mode 100644 index 0000000..3a7d5a1 --- /dev/null +++ b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py @@ -0,0 +1,207 @@ +# Copyright (C) 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Sample that implements device registration for the Google Assistant API.""" + +import json +import os + +import click +import google.oauth2.credentials +import google.auth.transport.requests + + +def failed_request_exception(message, r): + """Build ClickException from a failed request.""" + try: + resp = json.loads(r.text) + return click.ClickException('%s: %d %s' % (message, + resp['error']['code'], + resp['error']['message'])) + except ValueError: + # fallback on raw text response if error is not structured. 
+ return click.ClickException('%s: %d\n%s' % (message, + r.status_code, + r.text)) + + +@click.group() +@click.option('--project') +@click.option('--client-secret') +@click.option('--api-endpoint', default='embeddedassistant.googleapis.com', + show_default=True) +@click.option('--credentials', show_default=True, + default=os.path.join(click.get_app_dir('google-oauthlib-tool'), + 'credentials.json')) +@click.pass_context +def cli(ctx, project, client_secret, api_endpoint, credentials): + try: + with open(credentials, 'r') as f: + c = google.oauth2.credentials.Credentials(token=None, + **json.load(f)) + http_request = google.auth.transport.requests.Request() + c.refresh(http_request) + except Exception as e: + raise click.ClickException('Error loading credentials: %s.\n' + 'Run google-oauthlib-tool to initialize ' + 'new OAuth 2.0 credentials.' % e) + if project is None: + if client_secret is None: + client_secret = 'client_secret_%s.json' % c.client_id + try: + with open(client_secret, 'r') as f: + secret = json.load(f) + project = secret['installed']['project_id'] + except Exception as e: + raise click.ClickException('Error loading client secret: %s.\n' + 'Run the register tool' + 'with --client-secret ' + 'or --project option.\n' + 'Or copy the %s file' + 'in the current directory.' + % (e, client_secret)) + ctx.obj['SESSION'] = google.auth.transport.requests.AuthorizedSession(c) + ctx.obj['API_URL'] = ('https://%s/v1alpha2/projects/%s' + % (api_endpoint, project)) + ctx.obj['PROJECT_ID'] = project + + +@cli.command() +@click.option('--model', required=True) +@click.option('--type', type=click.Choice(['LIGHT', 'SWITCH', 'OUTLET']), + required=True) +@click.option('--trait', multiple=True) +@click.option('--manufacturer', required=True) +@click.option('--product-name', required=True) +@click.option('--description', required=True) +@click.option('--device', required=True) +@click.option('--nickname', required=True) +@click.pass_context +def register(ctx, model, type, trait, manufacturer, product_name, description, + device, nickname): + ctx.invoke(register_model, + model=model, type=type, trait=trait, + manufacturer=manufacturer, + product_name=product_name, + description=description) + ctx.invoke(register_device, device=device, model=model, nickname=nickname) + + +@cli.command('register-model') +@click.option('--model', required=True) +@click.option('--type', type=click.Choice(['LIGHT', 'SWITCH', 'OUTLET']), + required=True) +@click.option('--trait', multiple=True) +@click.option('--manufacturer', required=True) +@click.option('--product-name', required=True) +@click.option('--description', required=True) +@click.pass_context +def register_model(ctx, model, type, trait, + manufacturer, product_name, description): + session = ctx.obj['SESSION'] + + model_base_url = '/'.join([ctx.obj['API_URL'], 'deviceModels']) + model_url = '/'.join([model_base_url, model]) + payload = { + 'device_model_id': model, + 'project_id': ctx.obj['PROJECT_ID'], + 'device_type': 'action.devices.types.' 
+ type, + } + if trait: + payload['traits'] = trait + if manufacturer: + payload.setdefault('manifest', {})['manufacturer'] = manufacturer + if product_name: + payload.setdefault('manifest', {})['productName'] = product_name + if description: + payload.setdefault('manifest', {})['deviceDescription'] = description + r = session.get(model_url) + if r.status_code == 200: + click.echo('updating existing device model: %s' % model) + r = session.put(model_url, data=json.dumps(payload)) + elif r.status_code in (400, 404): + click.echo('creating new device model') + r = session.post(model_base_url, data=json.dumps(payload)) + else: + raise failed_request_exception('failed to check existing model', r) + if r.status_code != 200: + raise failed_request_exception('failed to register new model', r) + click.echo(r.text) + + +@cli.command('register-device') +@click.option('--device', required=True) +@click.option('--model', required=True) +@click.option('--nickname', required=True) +@click.pass_context +def register_device(ctx, device, model, nickname): + session = ctx.obj['SESSION'] + + device_base_url = '/'.join([ctx.obj['API_URL'], 'devices']) + device_url = '/'.join([device_base_url, device]) + payload = { + 'id': device, + 'model_id': model, + } + if nickname: + payload['nickname'] = nickname + + r = session.get(device_url) + if r.status_code == 200: + click.echo('updating existing device: %s' % device) + session.delete(device_url) + r = session.post(device_base_url, data=json.dumps(payload)) + elif r.status_code in (400, 404): + click.echo('creating new device') + r = session.post(device_base_url, data=json.dumps(payload)) + else: + raise failed_request_exception('failed to check existing device', r) + if r.status_code != 200: + raise failed_request_exception('failed to register new device', r) + click.echo(r.text) + + +@cli.command() +@click.option('--model', 'resource', flag_value='deviceModels', required=True) +@click.option('--device', 'resource', flag_value='devices', required=True) +@click.argument('id') +@click.pass_context +def get(ctx, resource, id): + session = ctx.obj['SESSION'] + url = '/'.join([ctx.obj['API_URL'], resource, id]) + r = session.get(url) + if r.status_code != 200: + raise failed_request_exception('failed to get resource', r) + click.echo(r.text) + + +@cli.command() +@click.option('--model', 'resource', flag_value='deviceModels', required=True) +@click.option('--device', 'resource', flag_value='devices', required=True) +@click.pass_context +def list(ctx, resource): + session = ctx.obj['SESSION'] + url = '/'.join([ctx.obj['API_URL'], resource]) + r = session.get(url) + if r.status_code != 200: + raise failed_request_exception('failed to list resources', r) + click.echo(r.text) + + +def main(): + cli(obj={}) + + +if __name__ == '__main__': + main() diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt b/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt index af5964d..18f561d 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt +++ b/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt @@ -1,6 +1,6 @@ google-assistant-grpc==0.0.2 google-auth-oauthlib==0.1.0 -urllib3[secure]==1.21.1 +urllib3[secure]==1.22 sounddevice==0.3.7 click==6.7 tenacity==4.1.0 diff --git a/google-assistant-sdk/setup.py b/google-assistant-sdk/setup.py index 462295e..c3929e9 100644 --- a/google-assistant-sdk/setup.py +++ b/google-assistant-sdk/setup.py @@ -58,6 +58,8 @@ def samples_requirements(): 'console_scripts': [ 
'googlesamples-assistant-audiotest' '=googlesamples.assistant.grpc.audio_helpers:main', + 'googlesamples-assistant-devicetool' + '=googlesamples.assistant.grpc.devicetool:main', 'googlesamples-assistant-pushtotalk' '=googlesamples.assistant.grpc.pushtotalk:main [samples]', 'googlesamples-assistant-hotword' From 3521210f04e5066d604eef60694f43e8bf460adb Mon Sep 17 00:00:00 2001 From: Johan Euphrosine Date: Wed, 13 Sep 2017 14:28:11 +0900 Subject: [PATCH 03/35] google-assistant-sdk/README: add note about identifier uniqueness Change-Id: I9e13779b138d36f6288fdc89212d312377582945 --- google-assistant-sdk/README.rst | 25 +++++++++++-------- .../googlesamples/assistant/grpc/README.rst | 9 +++++-- 2 files changed, 22 insertions(+), 12 deletions(-) diff --git a/google-assistant-sdk/README.rst b/google-assistant-sdk/README.rst index 0ca8997..a541681 100644 --- a/google-assistant-sdk/README.rst +++ b/google-assistant-sdk/README.rst @@ -65,34 +65,39 @@ instances and associate them with Device Actions traits. googlesamples-assistant-devicetool --help -- Register a new device model and new device instance:: +- Register a new device model and new device instance (after replacing the 'placeholder values' between quotes):: - googlesamples-assistant-devicetool register --model my-model \ + googlesamples-assistant-devicetool register --model 'my-model-identifier' \ --type LIGHT --trait Blink \ --manufacturer 'Assistant SDK developer' \ --product-name 'Assistant SDK sample' \ --description 'Assistant SDK sample device' \ - --device my-device \ + --device 'my-device-identifier' \ --nickname 'My Assistant Device' -- Register or update the device model with the supported traits:: +- Register or update the device model with the supported traits (after replacing the 'placeholder values' between quotes):: - googlesamples-assistant-devicetool register-model --model my-model \ + googlesamples-assistant-devicetool register-model --model 'my-model-identifier' \ --type LIGHT --trait Blink \ --manufacturer 'Assistant SDK developer' \ --product-name 'Assistant SDK sample' \ --description 'Assistant SDK sample device' -- Register or update the device instance using the device model:: +*Note: The model identifier must be globally unique.* - googlesamples-assistant-devicetool register-device --device my-device \ - --model my-model \ +- Register or update the device instance using the device model (after replacing the 'placeholder values' between quotes):: + + googlesamples-assistant-devicetool register-device --device 'my-device-identifier' \ + --model 'my-model-identifier' \ --nickname 'My Assistant Device' +*Note: The device instance identifier should be unique within the + Google Developer Project associated with the device.* + - Verify that the device model and instance have been registered correctly:: - googlesamples-assistant-devicetool get --model my-model - googlesamples-assistant-devicetool get --device my-device + googlesamples-assistant-devicetool get --model 'my-model-identifier' + googlesamples-assistant-devicetool get --device 'my-device-identifier' - List all device models and instances:: diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/README.rst b/google-assistant-sdk/googlesamples/assistant/grpc/README.rst index fa7c476..88526bd 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/README.rst +++ b/google-assistant-sdk/googlesamples/assistant/grpc/README.rst @@ -69,13 +69,18 @@ Run the samples - Register or update the device model and device instance with the supported traits:: - 
python -m devicetool register --type LIGHT --trait Blink \ + python -m devicetool register --model 'my-model-identifier' \ + --type LIGHT --trait Blink \ --manufacturer 'Assistant SDK developer' \ --product-name 'Assistant SDK sample' \ --description 'Assistant SDK sample device' \ - --device my-device \ + --device 'my-device-identifier' \ --nickname 'My Assistant Device' +*Note: The device model identifier must be globally unique, and + device instance identifier must be unique within the Google + Developer Project associated with the device.* + - Run the push to talk sample. The sample records a voice query after a key press and plays back the Google Assistant's answer:: python -m pushtotalk From 74ffa06596195afa4eb19fed993e20beba834549 Mon Sep 17 00:00:00 2001 From: Johan Euphrosine Date: Fri, 15 Sep 2017 14:02:23 +0900 Subject: [PATCH 04/35] google-assistant-sdk: switch to standard trait - remove Blink trait and action package - add action.devices.commands.OnOff handler - make device-id required - make description and nickname optional - fix cli tool error handling Bug: 65391284 Bug: 65704003 Bug: 65704030 Change-Id: I841a20ead9e12dfc8fe5205162ac5dbdb5e7474d --- google-assistant-sdk/README.rst | 71 ++++--------------- .../googlesamples/assistant/grpc/README.rst | 36 +++------- .../assistant/grpc/actions/blink.json | 31 -------- .../assistant/grpc/device_helpers.py | 8 +-- .../assistant/grpc/devicetool.py | 22 +++--- .../assistant/grpc/pushtotalk.py | 16 ++--- google-assistant-sdk/nox.py | 1 + .../tests/test_device_helpers.py | 22 ++---- 8 files changed, 52 insertions(+), 155 deletions(-) delete mode 100644 google-assistant-sdk/googlesamples/assistant/grpc/actions/blink.json diff --git a/google-assistant-sdk/README.rst b/google-assistant-sdk/README.rst index a541681..85f7f70 100644 --- a/google-assistant-sdk/README.rst +++ b/google-assistant-sdk/README.rst @@ -68,31 +68,30 @@ instances and associate them with Device Actions traits. 
- Register a new device model and new device instance (after replacing the 'placeholder values' between quotes):: googlesamples-assistant-devicetool register --model 'my-model-identifier' \ - --type LIGHT --trait Blink \ + --type LIGHT --trait action.devices.traits.OnOff \ --manufacturer 'Assistant SDK developer' \ - --product-name 'Assistant SDK sample' \ - --description 'Assistant SDK sample device' \ + --product-name 'Assistant SDK light' \ + --description 'Assistant SDK light device' \ --device 'my-device-identifier' \ - --nickname 'My Assistant Device' + --nickname 'My Assistant Light' -- Register or update the device model with the supported traits (after replacing the 'placeholder values' between quotes):: +- Register or overwrite the device model with the supported traits (after replacing the 'placeholder values' between quotes):: googlesamples-assistant-devicetool register-model --model 'my-model-identifier' \ - --type LIGHT --trait Blink \ + --type LIGHT --trait action.devices.traits.OnOff \ --manufacturer 'Assistant SDK developer' \ - --product-name 'Assistant SDK sample' \ - --description 'Assistant SDK sample device' + --product-name 'Assistant SDK light' \ + --description 'Assistant SDK light device' *Note: The model identifier must be globally unique.* -- Register or update the device instance using the device model (after replacing the 'placeholder values' between quotes):: +- Register or overwrite the device instance using the device model (after replacing the 'placeholder values' between quotes):: googlesamples-assistant-devicetool register-device --device 'my-device-identifier' \ --model 'my-model-identifier' \ - --nickname 'My Assistant Device' + --nickname 'My Assistant Light' -*Note: The device instance identifier should be unique within the - Google Developer Project associated with the device.* +*Note: The device instance identifier should be unique within the Google Developer Project associated with the device.* - Verify that the device model and instance have been registered correctly:: @@ -104,46 +103,6 @@ instances and associate them with Device Actions traits. googlesamples-assistant-devicetool list --model googlesamples-assistant-devicetool list --device -- Get the `gactions`_ CLI tool. - -- Create an `Action Package`_ describing the Device Actions traits that the device implements:: - - { - "manifest": { - "displayName": "Blinky light", - "invocationName": "Blinky light", - "category": "PRODUCTIVITY" - }, - "actions": [{ - "name": "actions.devices", - "config": { - "@type": "type.googleapis.com/google.actions.v2.devices.DeviceControl", - "commands": [{ - "intent": { - "name": "BLINK", - "parameters": [{ - "name": "number", - "type": "SchemaOrg_Number" - }], - "trigger": { - "queryPatterns": [ - "Blink $SchemaOrg_Number:number times" - ] - } - }, - "directResponseFulfillment": { - "ttsPattern": "Blinking the light $SchemaOrg_Number:number times" - }, - "requiredTraits": ["Blink"] - }] - } - }] - } - -- Register the `Action package`_ using the `gactions`_ CLI tool:: - - gactions test --action_package blink.json --project - googlesamples-assistant-pushtotalk ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -156,15 +115,15 @@ This reference sample implements a simple but functional client for the `Google - Run the push to talk sample. 
The sample records a voice query after a key press and plays back the Google Assistant's answer:: - googlesamples-assistant-pushtotalk + googlesamples-assistant-pushtotalk --device-id 'my-device-identifier' - Try some Google Assistant voice query like "What time is it?" or "Who am I?". -- Try a custom device action query supported by the device like "Blink 5 times". +- Try a device action query like "Turn on". - Run in verbose mode to see the gRPC communication with the Google Assistant API:: - googlesamples-assistant-pushtotalk -v + googlesamples-assistant-pushtotalk --device-id 'my-device-identifier' -v googlesamples-assistant-hotword ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -218,5 +177,3 @@ the License. .. _Google Assistant SDK: https://developers.google.com/assistant/sdk .. _Google Assistant gRPC API: https://developers.google.com/assistant/sdk/reference/rpc .. _Google Assistant library: https://developers.google.com/assistant/sdk/reference/library/python -.. _Action Package: https://developers.google.com/actions/reference/rest/Shared.Types/ActionPackage -.. _gactions: https://developers.google.com/actions/tools/gactions-cli diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/README.rst b/google-assistant-sdk/googlesamples/assistant/grpc/README.rst index 88526bd..1a8b0bd 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/README.rst +++ b/google-assistant-sdk/googlesamples/assistant/grpc/README.rst @@ -63,47 +63,37 @@ Run the samples # Record a 5 sec sample and play it back python -m audio_helpers -- Register the `Action Package`_ describing the Device Actions traits that the device implements:: - - gactions test --action_package blink.json --project - -- Register or update the device model and device instance with the supported traits:: +- Register or overwrite the device model and device instance with the supported traits:: python -m devicetool register --model 'my-model-identifier' \ - --type LIGHT --trait Blink \ + --type LIGHT --trait action.devices.traits.OnOff \ --manufacturer 'Assistant SDK developer' \ - --product-name 'Assistant SDK sample' \ - --description 'Assistant SDK sample device' \ + --product-name 'Assistant SDK light' \ + --description 'Assistant SDK light device' \ --device 'my-device-identifier' \ - --nickname 'My Assistant Device' + --nickname 'My Assistant Light' -*Note: The device model identifier must be globally unique, and - device instance identifier must be unique within the Google - Developer Project associated with the device.* +*Note: The device model identifier must be globally unique, and device instance identifier must be unique within the Google Developer Project associated with the device.* - Run the push to talk sample. The sample records a voice query after a key press and plays back the Google Assistant's answer:: - python -m pushtotalk + python -m pushtotalk --device-id 'my-device-identifier' - Try some Google Assistant voice query like "What time is it?" or "Who am I?". -- Register the `Action package `_ for the device:: - - gactions test --action_package blink.json --project - -- Try a custom device action query like "Blink 5 times". +- Try a device action query like "Turn on". 
- Run in verbose mode to see the gRPC communication with the Google Assistant API:: - python -m pushtotalk -v + python -m pushtotalk --device-id 'my-device-identifier' -v - Send a pre-recorded request to the Assistant:: - python -m pushtotalk -i in.wav + python -m pushtotalk --device-id 'my-device-identifier' -i in.wav - Save the Assistant response to a file:: - python -m pushtotalk -o out.wav + python -m pushtotalk --device-id 'my-device-identifier' -o out.wav Troubleshooting --------------- @@ -117,10 +107,6 @@ Troubleshooting arecord --format=S16_LE --duration=5 --rate=16k --file-type=raw out.raw aplay --format=S16_LE --rate=16k --file-type=raw out.raw -- Run the sample with verbose logging enabled:: - - python -m pushtotalk --verbose - - If Assistant audio is choppy, try adjusting the sound device's block size:: # If using a USB speaker or dedicated soundcard, set block size to "0" diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/actions/blink.json b/google-assistant-sdk/googlesamples/assistant/grpc/actions/blink.json deleted file mode 100644 index 43a9eca..0000000 --- a/google-assistant-sdk/googlesamples/assistant/grpc/actions/blink.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "manifest": { - "displayName": "Blinky light", - "invocationName": "Blinky light", - "category": "PRODUCTIVITY" - }, - "actions": [{ - "name": "actions.devices", - "config": { - "@type": "type.googleapis.com/google.actions.v2.devices.DeviceControl", - "commands": [{ - "intent": { - "name": "BLINK", - "parameters": [{ - "name": "number", - "type": "SchemaOrg_Number" - }], - "trigger": { - "queryPatterns": [ - "Blink $SchemaOrg_Number:number times" - ] - } - }, - "directResponseFulfillment": { - "ttsPattern": "Blinking the light $SchemaOrg_Number:number times" - }, - "requiredTraits": ["Blink"] - }] - } - }] -} diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py b/google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py index a8a9d72..7b0f268 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py @@ -33,12 +33,8 @@ class DeviceRequestHandler(object): Args: device_id: device id to match command against - handlers: map of command name to callable. Example: - # Pass key value params to register handler for intent at creation time. - DeviceRequestHandler('my-device', INTENT_NAME=handler) - # Use as as decorator to register handler. device_handler = DeviceRequestHandler('my-device') @device_handler.command('INTENT_NAME') @@ -46,10 +42,10 @@ def handler(param): pass """ - def __init__(self, device_id, **handlers): + def __init__(self, device_id): self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) self.device_id = device_id - self.handlers = handlers + self.handlers = {} def __call__(self, device_request): """Handle incoming device request. 
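With the keyword-argument form removed, command handlers are now registered only through the command decorator. A minimal sketch of an OnOff handler matching the standard trait this patch switches to; the device identifier is a placeholder and the boolean 'on' parameter is assumed from the action.devices.commands.OnOff execution:

    # Sketch only: decorator-based registration for the standard OnOff command.
    # 'my-device-identifier' is a placeholder; the handler assumes the execution
    # params contain a boolean 'on' field.
    import logging

    from googlesamples.assistant.grpc import device_helpers

    device_handler = device_helpers.DeviceRequestHandler('my-device-identifier')

    @device_handler.command('action.devices.commands.OnOff')
    def onoff(on):
        if on:
            logging.info('Turning device on.')
        else:
            logging.info('Turning device off.')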
diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py index 3a7d5a1..8f48261 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py @@ -26,9 +26,13 @@ def failed_request_exception(message, r): """Build ClickException from a failed request.""" try: resp = json.loads(r.text) - return click.ClickException('%s: %d %s' % (message, - resp['error']['code'], - resp['error']['message'])) + message = '%s: %d %s' % (message, + resp['error']['code'], + resp['error']['message']) + if 'details' in resp['error']: + details = '\n'.join(d['detail'] for d in resp['error']['details']) + message += ' ' + details + return click.ClickException(message) except ValueError: # fallback on raw text response if error is not structured. return click.ClickException('%s: %d\n%s' % (message, @@ -84,9 +88,9 @@ def cli(ctx, project, client_secret, api_endpoint, credentials): @click.option('--trait', multiple=True) @click.option('--manufacturer', required=True) @click.option('--product-name', required=True) -@click.option('--description', required=True) +@click.option('--description') @click.option('--device', required=True) -@click.option('--nickname', required=True) +@click.option('--nickname') @click.pass_context def register(ctx, model, type, trait, manufacturer, product_name, description, device, nickname): @@ -105,7 +109,7 @@ def register(ctx, model, type, trait, manufacturer, product_name, description, @click.option('--trait', multiple=True) @click.option('--manufacturer', required=True) @click.option('--product-name', required=True) -@click.option('--description', required=True) +@click.option('--description') @click.pass_context def register_model(ctx, model, type, trait, manufacturer, product_name, description): @@ -136,14 +140,14 @@ def register_model(ctx, model, type, trait, else: raise failed_request_exception('failed to check existing model', r) if r.status_code != 200: - raise failed_request_exception('failed to register new model', r) + raise failed_request_exception('failed to register model', r) click.echo(r.text) @cli.command('register-device') @click.option('--device', required=True) @click.option('--model', required=True) -@click.option('--nickname', required=True) +@click.option('--nickname') @click.pass_context def register_device(ctx, device, model, nickname): session = ctx.obj['SESSION'] @@ -168,7 +172,7 @@ def register_device(ctx, device, model, nickname): else: raise failed_request_exception('failed to check existing device', r) if r.status_code != 200: - raise failed_request_exception('failed to register new device', r) + raise failed_request_exception('failed to register device', r) click.echo(r.text) diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py b/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py index 40226f2..2bd0813 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py @@ -18,7 +18,6 @@ import json import logging import os.path -import time import click import grpc @@ -46,7 +45,6 @@ END_OF_UTTERANCE = embedded_assistant_pb2.ConverseResponse.END_OF_UTTERANCE DIALOG_FOLLOW_ON = embedded_assistant_pb2.ConverseResult.DIALOG_FOLLOW_ON CLOSE_MICROPHONE = embedded_assistant_pb2.ConverseResult.CLOSE_MICROPHONE -DEFAULT_DEVICE_ID = 'googlesamples-pushtotalk-device' DEFAULT_GRPC_DEADLINE 
= 60 * 3 + 5 @@ -209,7 +207,7 @@ def gen_converse_requests(self): default=os.path.join(click.get_app_dir('google-oauthlib-tool'), 'credentials.json'), help='Path to read OAuth2 credentials.') -@click.option('--device-id', default=DEFAULT_DEVICE_ID, +@click.option('--device-id', required=True, metavar='', show_default=True, help='Unique device instance identifier.') @click.option('--verbose', '-v', is_flag=True, default=False, @@ -332,12 +330,12 @@ def main(api_endpoint, credentials, device_id, verbose, device_handler = device_helpers.DeviceRequestHandler(device_id) - @device_handler.command('BLINK') - def blink(number): - logging.info('Blinking device %s times.' % number) - for i in range(int(number)): - logging.info('Device is blinking.') - time.sleep(1) + @device_handler.command('action.devices.commands.OnOff') + def onoff(on): + if on: + logging.info('Turning device on') + else: + logging.info('Turning device off') with SampleAssistant(device_id, conversation_stream, grpc_channel, grpc_deadline, diff --git a/google-assistant-sdk/nox.py b/google-assistant-sdk/nox.py index fa5bfd4..d825107 100644 --- a/google-assistant-sdk/nox.py +++ b/google-assistant-sdk/nox.py @@ -68,6 +68,7 @@ def migrate_credentials(old, new): temp_dir = tempfile.mkdtemp() audio_out_file = os.path.join(temp_dir, 'out.raw') session.run('python', '-m', 'googlesamples.assistant.grpc.pushtotalk', + '--device-id', 'test-device', '-i', 'tests/data/whattimeisit.riff', '-o', audio_out_file) session.run('test', '-s', audio_out_file) diff --git a/google-assistant-sdk/tests/test_device_helpers.py b/google-assistant-sdk/tests/test_device_helpers.py index 49b48ff..e5feb0b 100644 --- a/google-assistant-sdk/tests/test_device_helpers.py +++ b/google-assistant-sdk/tests/test_device_helpers.py @@ -49,8 +49,8 @@ def handler(self, arg): def test_success(self): device_handler = device_helpers.DeviceRequestHandler( 'some-device', - SOME_COMMAND=self.handler ) + device_handler.command('SOME_COMMAND')(self.handler) device_request = build_device_request('some-device', 'SOME_COMMAND', 'some-arg') @@ -62,8 +62,8 @@ def test_success(self): def test_different_device(self): device_handler = device_helpers.DeviceRequestHandler( 'some-device', - SOME_COMMAND=self.handler ) + device_handler.command('SOME_COMMAND')(self.handler) device_request = build_device_request('other-device', 'SOME_COMMAND', 'some-arg') @@ -74,8 +74,8 @@ def test_different_device(self): def test_unknown_command(self): device_handler = device_helpers.DeviceRequestHandler( 'some-device', - SOME_COMMAND=self.handler ) + device_handler.command('SOME_COMMAND')(self.handler) device_request = build_device_request('some-device', 'OTHER_COMMAND', 'some-arg') @@ -90,8 +90,8 @@ def failing_command(arg): raise err device_handler = device_helpers.DeviceRequestHandler( 'some-device', - FAILING_COMMAND=failing_command ) + device_handler.command('FAILING_COMMAND')(failing_command) device_request = build_device_request('some-device', 'FAILING_COMMAND', 'some-arg') @@ -99,17 +99,3 @@ def failing_command(arg): self.assertEqual(len(fs), 1) concurrent.futures.wait(fs) self.assertEqual(fs[0].exception(), err) - - def test_decorator(self): - device_handler = device_helpers.DeviceRequestHandler('some-device') - - @device_handler.command('SOME_COMMAND') - def handler(arg): - self.handler_called = arg - device_request = build_device_request('some-device', - 'SOME_COMMAND', - 'some-arg') - fs = device_handler(device_request) - self.assertEqual(len(fs), 1) - concurrent.futures.wait(fs) - 
self.assertEqual(self.handler_called, 'some-arg') From e87b4a685e703df64a3892ea1bc96bb72dd140ec Mon Sep 17 00:00:00 2001 From: Johan Euphrosine Date: Wed, 27 Sep 2017 15:36:51 +0900 Subject: [PATCH 05/35] googlesamples/grpc/device_helper: ignore noop execution Bug: 66963608 Change-Id: Ia1b2074749266aa135a483ead3406c4fa6118947 --- .../assistant/grpc/device_helpers.py | 22 +++++++++------ .../tests/test_device_helpers.py | 27 +++++++++++++++++++ 2 files changed, 41 insertions(+), 8 deletions(-) diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py b/google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py index 7b0f268..22930c5 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/device_helpers.py @@ -67,18 +67,24 @@ def decorator(fn): return decorator def submit_commands(self, devices, execution): - """Submit device command executions.""" + """Submit device command executions. + + Returns: a list of concurrent.futures for scheduled executions. + """ fs = [] for device in devices: - if device[key_id_] == self.device_id: - for command in execution: - f = self.executor.submit( - self.dispatch_command, **command - ) - fs.append(f) - else: + if device[key_id_] != self.device_id: logging.warning('Ignoring command for unknown device: %s' % device[key_id_]) + continue + if not execution: + logging.warning('Ignoring noop execution') + continue + for command in execution: + f = self.executor.submit( + self.dispatch_command, **command + ) + fs.append(f) return fs def dispatch_command(self, command, params=None): diff --git a/google-assistant-sdk/tests/test_device_helpers.py b/google-assistant-sdk/tests/test_device_helpers.py index e5feb0b..b62cad4 100644 --- a/google-assistant-sdk/tests/test_device_helpers.py +++ b/google-assistant-sdk/tests/test_device_helpers.py @@ -39,6 +39,23 @@ def build_device_request(device_id, command, arg): } +def build_noop_device_request(device_id): + return { + 'inputs': [{ + 'intent': 'action.devices.EXECUTE', + 'payload': { + 'commands': [{ + 'devices': [ + {'id': device_id} + ], + 'execution': None, + }] + } + }], + 'requestId': '42' + } + + class DeviceRequestHandlerTest(unittest.TestCase): def setUp(self): self.handler_called = False @@ -83,6 +100,16 @@ def test_unknown_command(self): self.assertEqual(len(fs), 1) self.assertFalse(self.handler_called) + def test_noop_execution(self): + device_handler = device_helpers.DeviceRequestHandler( + 'some-device', + ) + device_handler.command('SOME_COMMAND')(self.handler) + device_request = build_noop_device_request('some-device') + fs = device_handler(device_request) + self.assertEqual(len(fs), 0) + self.assertFalse(self.handler_called) + def test_exception(self): err = Exception('some error') From b841409a2bd5aef43cc3b5a9334674c8b8fabfcc Mon Sep 17 00:00:00 2001 From: Nick Felker Date: Fri, 6 Oct 2017 10:31:34 -0700 Subject: [PATCH 06/35] Increases versions to new MINOR release Change-Id: Ib18bce5c1756d33c8324ef7bb51ec7f71721a345 --- google-assistant-grpc/CHANGELOG.rst | 3 +++ google-assistant-grpc/setup.py | 2 +- google-assistant-library/CHANGELOG.rst | 2 -- google-assistant-sdk/CHANGELOG.rst | 4 +++- google-assistant-sdk/setup.py | 2 +- 5 files changed, 8 insertions(+), 5 deletions(-) diff --git a/google-assistant-grpc/CHANGELOG.rst b/google-assistant-grpc/CHANGELOG.rst index 1685b61..052a02d 100644 --- a/google-assistant-grpc/CHANGELOG.rst +++ b/google-assistant-grpc/CHANGELOG.rst @@ -1,5 +1,8 @@ 
Changelog ========= +0.1.0 +----- +- Adds Device Actions to gRPC protobuf 0.0.2 ----- diff --git a/google-assistant-grpc/setup.py b/google-assistant-grpc/setup.py index 84f49c7..d1016a8 100644 --- a/google-assistant-grpc/setup.py +++ b/google-assistant-grpc/setup.py @@ -25,7 +25,7 @@ setup( name='google-assistant-grpc', - version='0.0.2', + version='0.1.0', author='Google Assistant SDK team', author_email='proppy+assistant-sdk@google.com', description='Google Assistant API gRPC bindings', diff --git a/google-assistant-library/CHANGELOG.rst b/google-assistant-library/CHANGELOG.rst index ba354d0..007f9d5 100644 --- a/google-assistant-library/CHANGELOG.rst +++ b/google-assistant-library/CHANGELOG.rst @@ -1,7 +1,5 @@ Changelog ========= - - 0.0.3 ----- - Support for x86_64 Linux diff --git a/google-assistant-sdk/CHANGELOG.rst b/google-assistant-sdk/CHANGELOG.rst index d0dff9a..7c885da 100644 --- a/google-assistant-sdk/CHANGELOG.rst +++ b/google-assistant-sdk/CHANGELOG.rst @@ -1,6 +1,8 @@ Changelog ========= - +0.4.0 +----- +- Adds Device Actions 0.3.3 ------- diff --git a/google-assistant-sdk/setup.py b/google-assistant-sdk/setup.py index c3929e9..289706e 100644 --- a/google-assistant-sdk/setup.py +++ b/google-assistant-sdk/setup.py @@ -39,7 +39,7 @@ def samples_requirements(): setup( name='google-assistant-sdk', - version='0.3.3', + version='0.4.0', author='Google Assistant SDK team', author_email='proppy+assistant-sdk@google.com', description='Samples and Tools the Google Assistant SDK', From 175864881eae86ab1080165b15b1a1842095ad29 Mon Sep 17 00:00:00 2001 From: Nick Felker Date: Tue, 10 Oct 2017 18:25:29 -0700 Subject: [PATCH 07/35] Updates gRPC requirement for samples Bug: 67500578 Change-Id: I849b2ec3fb577f6ec75ddca7c6ee6bbaa5497d42 --- .../googlesamples/assistant/grpc/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt b/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt index 3b3d758..69f1351 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt +++ b/google-assistant-sdk/googlesamples/assistant/grpc/requirements.txt @@ -1,4 +1,4 @@ -google-assistant-grpc==0.0.2 +google-assistant-grpc==0.1.0 google-auth-oauthlib>=0.1.0,<0.2 urllib3[secure]>=1.21,<2 sounddevice>=0.3.7,<0.4 From 4a176c4e8030496501b02dce41fd0e503c797d13 Mon Sep 17 00:00:00 2001 From: Nick Cook Date: Wed, 18 Oct 2017 14:04:54 -0700 Subject: [PATCH 08/35] Add help text to devicetool commands. Change-Id: Ide6341ef85919e59d43152d68e3c8c0819dcb9a2 Bug: 67367104 (cherry picked from commit 0a8284ee2b76a4dcfeaee966e38bd39a4000e204) --- .../assistant/grpc/devicetool.py | 142 +++++++++++++++--- 1 file changed, 119 insertions(+), 23 deletions(-) diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py index 8f48261..0e22d8b 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py @@ -41,13 +41,34 @@ def failed_request_exception(message, r): @click.group() -@click.option('--project') -@click.option('--client-secret') +@click.option('--project', + help='Enter the Google Developer Project ID that you want to ' + 'use with the registration tool. 
If you don\'t use this flag, ' + 'the tool will use the project listed in the ' + ' file you specify with the ' + '--client-secret flag.') +@click.option('--client-secret', + help='Enter the path and filename for the ' + ' file you downloaded from your ' + 'developer project. This file is used to infer the Google ' + 'Developer Project ID if it was not provided with the --project ' + 'flag. If the --project flag and this flag are not used, the ' + 'tool will look for this file in the current directory (by ' + 'searching for a file named after the client_id stored in the ' + 'credentials file).') @click.option('--api-endpoint', default='embeddedassistant.googleapis.com', - show_default=True) + show_default=True, + help='Hostname for the Google Assistant API. Do not use this ' + 'flag unless explicitly instructed.') @click.option('--credentials', show_default=True, default=os.path.join(click.get_app_dir('google-oauthlib-tool'), - 'credentials.json')) + 'credentials.json'), + help='File location of the generated credentials file. The ' + 'google-oauthlib-tool generates this file after authorizing ' + 'the user with the file. This ' + 'credentials file authorizes access to the Google Assistant ' + 'API. You can use this flag if the credentials were generated ' + 'in a location that is different than the default.') @click.pass_context def cli(ctx, project, client_secret, api_endpoint, credentials): try: @@ -82,18 +103,47 @@ def cli(ctx, project, client_secret, api_endpoint, credentials): @cli.command() -@click.option('--model', required=True) +@click.option('--model', required=True, + help='Enter a globally-unique identifier for this device model; ' + 'you should use your project ID as a prefix to help avoid ' + 'collisions over the range of all projects (for example, ' + '"my-dev-project-my-led1").') @click.option('--type', type=click.Choice(['LIGHT', 'SWITCH', 'OUTLET']), - required=True) -@click.option('--trait', multiple=True) -@click.option('--manufacturer', required=True) -@click.option('--product-name', required=True) -@click.option('--description') -@click.option('--device', required=True) -@click.option('--nickname') + required=True, + help='Select the type of device hardware that best aligns with ' + 'your device. Select LIGHT if none of the listed choices aligns ' + 'with your device.') +@click.option('--trait', multiple=True, + help='Add traits (abilities) that the device supports. Pass ' + 'this flag multiple times to create a list of traits. Refer to ' + 'https://developers.google.com/assistant/sdk for a list of ' + 'supported traits.') +@click.option('--manufacturer', required=True, + help='Enter the manufacturer\'s name in this field (for ' + 'example, "Assistant SDK developer"). This information may be ' + 'shown in the Assistant settings and internal analytics.') +@click.option('--product-name', required=True, + help='Enter the product name in this field (for example, ' + '"Assistant SDK light").') +@click.option('--description', + help='Enter a description of the product in this field (for ' + 'example, "Assistant SDK light device").') +@click.option('--device', required=True, + help='Enter an identifier for the device instance. This ID must ' + 'be unique within all of the devices registered under the same ' + 'Google Developer project.') +@click.option('--nickname', + help='Enter a nickname for the device. 
You can use this name ' + 'when talking to your Assistant to refer to this device.') @click.pass_context def register(ctx, model, type, trait, manufacturer, product_name, description, device, nickname): + """Registers a device model and instance. + + Device model and instance fields can only contain letters, numbers, and the + following symbols: period (.), hyphen (-), underscore (_), space ( ) and + plus (+). The first character of a field must be a letter or number. + """ ctx.invoke(register_model, model=model, type=type, trait=trait, manufacturer=manufacturer, @@ -103,16 +153,40 @@ def register(ctx, model, type, trait, manufacturer, product_name, description, @cli.command('register-model') -@click.option('--model', required=True) +@click.option('--model', required=True, + help='Enter a globally-unique identifier for this device model; ' + 'you should use your project ID as a prefix to help avoid ' + 'collisions over the range of all projects (for example, ' + '"my-dev-project-my-led1").') @click.option('--type', type=click.Choice(['LIGHT', 'SWITCH', 'OUTLET']), - required=True) -@click.option('--trait', multiple=True) -@click.option('--manufacturer', required=True) -@click.option('--product-name', required=True) -@click.option('--description') + required=True, + help='Select the type of device hardware that best aligns with ' + 'your device. Select LIGHT if none of the listed choices aligns ' + 'with your device.') +@click.option('--trait', multiple=True, + help='Add traits (abilities) that the device supports. Pass ' + 'this flag multiple times to create a list of traits. Refer to ' + 'https://developers.google.com/assistant/sdk for a list of ' + 'supported traits.') +@click.option('--manufacturer', required=True, + help='Enter the manufacturer\'s name in this field (for ' + 'example, "Assistant SDK developer"). This information may be ' + 'shown in the Assistant settings and internal analytics.') +@click.option('--product-name', required=True, + help='Enter the product name in this field (for example, ' + '"Assistant SDK light").') +@click.option('--description', + help='Enter a description of the product in this field (for ' + 'example, "Assistant SDK light device").') @click.pass_context def register_model(ctx, model, type, trait, manufacturer, product_name, description): + """Registers a device model. + + Device model and instance fields can only contain letters, numbers, and the + following symbols: period (.), hyphen (-), underscore (_), space ( ) and + plus (+). The first character of a field must be a letter or number. + """ session = ctx.obj['SESSION'] model_base_url = '/'.join([ctx.obj['API_URL'], 'deviceModels']) @@ -145,11 +219,24 @@ def register_model(ctx, model, type, trait, @cli.command('register-device') -@click.option('--device', required=True) -@click.option('--model', required=True) -@click.option('--nickname') +@click.option('--device', required=True, + help='Enter an identifier for the device instance. This ID must ' + 'be unique within all of the devices registered under the same ' + 'Google Developer project.') +@click.option('--model', required=True, + help='Enter the identifier for an existing device model. This ' + 'new device instance will be associated with this device model.') +@click.option('--nickname', + help='Enter a nickname for the device. 
You can use this name ' + 'when talking to your Assistant to refer to this device.') @click.pass_context def register_device(ctx, device, model, nickname): + """Registers a device instance under an existing device model. + + Device model and instance fields can only contain letters, numbers, and the + following symbols: period (.), hyphen (-), underscore (_), space ( ) and + plus (+). The first character of a field must be a letter or number. + """ session = ctx.obj['SESSION'] device_base_url = '/'.join([ctx.obj['API_URL'], 'devices']) @@ -177,11 +264,16 @@ def register_device(ctx, device, model, nickname): @cli.command() -@click.option('--model', 'resource', flag_value='deviceModels', required=True) -@click.option('--device', 'resource', flag_value='devices', required=True) +@click.option('--model', 'resource', flag_value='deviceModels', required=True, + help='Enter the identifier for an existing device model.') +@click.option('--device', 'resource', flag_value='devices', required=True, + help='Enter the identifier for an existing device instance.') @click.argument('id') @click.pass_context def get(ctx, resource, id): + """Gets all of the information (fields) for a given device model or + instance. + """ session = ctx.obj['SESSION'] url = '/'.join([ctx.obj['API_URL'], resource, id]) r = session.get(url) @@ -195,6 +287,10 @@ def get(ctx, resource, id): @click.option('--device', 'resource', flag_value='devices', required=True) @click.pass_context def list(ctx, resource): + """Lists all of the device models and/or instances associated with the + current Google Developer project. To change the current project, use the + devicetool's --project flag. + """ session = ctx.obj['SESSION'] url = '/'.join([ctx.obj['API_URL'], resource]) r = session.get(url) From 067985eadbe2bc234bb951e7f716ab0d19b449c3 Mon Sep 17 00:00:00 2001 From: Johan Euphrosine Date: Tue, 7 Nov 2017 15:19:26 +0900 Subject: [PATCH 09/35] google-assistant-sdk/devicetool: add delete command Bug: 67062564 Change-Id: I985e023f0455c01ffd6b9f7f3e3a7d7055ab2576 --- .../googlesamples/assistant/grpc/devicetool.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py index 0e22d8b..8921da1 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py @@ -282,6 +282,24 @@ def get(ctx, resource, id): click.echo(r.text) +@cli.command() +@click.option('--model', 'resource', flag_value='deviceModels', required=True, + help='Enter the identifier for an existing device model.') +@click.option('--device', 'resource', flag_value='devices', required=True, + help='Enter the identifier for an existing device instance.') +@click.argument('id') +@click.pass_context +def delete(ctx, resource, id): + """Delete given device model or instance. 
+ """ + session = ctx.obj['SESSION'] + url = '/'.join([ctx.obj['API_URL'], resource, id]) + r = session.delete(url) + if r.status_code != 200: + raise failed_request_exception('failed to delete resource', r) + click.echo(r.text) + + @cli.command() @click.option('--model', 'resource', flag_value='deviceModels', required=True) @click.option('--device', 'resource', flag_value='devices', required=True) From a7361603b41a18447793e7e6760131f131c8b7b6 Mon Sep 17 00:00:00 2001 From: Johan Euphrosine Date: Tue, 7 Nov 2017 16:55:20 +0900 Subject: [PATCH 10/35] google-assistant-grpc: regenerate proto bindings Bug: 67009650 Change-Id: Id5706b01e5dadca205188bf52fac2e163964ad93 --- .../v1alpha1/embedded_assistant_pb2.py | 280 +++--------------- .../v1alpha1/embedded_assistant_pb2_grpc.py | 1 + .../v1alpha1/embedded_assistant.proto | 63 ++-- .../assistant/grpc/pushtotalk.py | 9 +- 4 files changed, 78 insertions(+), 275 deletions(-) diff --git a/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2.py b/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2.py index 544e712..205ca91 100644 --- a/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2.py +++ b/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2.py @@ -21,7 +21,7 @@ name='google/assistant/embedded/v1alpha1/embedded_assistant.proto', package='google.assistant.embedded.v1alpha1', syntax='proto3', - serialized_pb=_b('\n;google/assistant/embedded/v1alpha1/embedded_assistant.proto\x12\"google.assistant.embedded.v1alpha1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/rpc/status.proto\"\xbe\x02\n\x0e\x43onverseConfig\x12J\n\x0f\x61udio_in_config\x18\x01 \x01(\x0b\x32\x31.google.assistant.embedded.v1alpha1.AudioInConfig\x12L\n\x10\x61udio_out_config\x18\x02 \x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.AudioOutConfig\x12I\n\x0e\x63onverse_state\x18\x03 \x01(\x0b\x32\x31.google.assistant.embedded.v1alpha1.ConverseState\x12G\n\rdevice_config\x18\x04 \x01(\x0b\x32\x30.google.assistant.embedded.v1alpha1.DeviceConfig\"\xb6\x01\n\rAudioInConfig\x12L\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32:.google.assistant.embedded.v1alpha1.AudioInConfig.Encoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\"<\n\x08\x45ncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\"\xe3\x01\n\x0e\x41udioOutConfig\x12M\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32;.google.assistant.embedded.v1alpha1.AudioOutConfig.Encoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x19\n\x11volume_percentage\x18\x03 \x01(\x05\"L\n\x08\x45ncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x07\n\x03MP3\x10\x02\x12\x0f\n\x0bOPUS_IN_OGG\x10\x03\"+\n\rConverseState\x12\x1a\n\x12\x63onversation_state\x18\x01 \x01(\x0c\"\x1e\n\x08\x41udioOut\x12\x12\n\naudio_data\x18\x01 \x01(\x0c\"\xbd\x02\n\x0e\x43onverseResult\x12\x1b\n\x13spoken_request_text\x18\x01 \x01(\t\x12\x1c\n\x14spoken_response_text\x18\x02 \x01(\t\x12\x1a\n\x12\x63onversation_state\x18\x03 \x01(\x0c\x12Z\n\x0fmicrophone_mode\x18\x04 \x01(\x0e\x32\x41.google.assistant.embedded.v1alpha1.ConverseResult.MicrophoneMode\x12\x19\n\x11volume_percentage\x18\x05 \x01(\x05\"]\n\x0eMicrophoneMode\x12\x1f\n\x1bMICROPHONE_MODE_UNSPECIFIED\x10\x00\x12\x14\n\x10\x43LOSE_MICROPHONE\x10\x01\x12\x14\n\x10\x44IALOG_FOLLOW_ON\x10\x02\"\x7f\n\x0f\x43onverseRequest\x12\x44\n\x06\x63onfig\x18\x01 
\x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.ConverseConfigH\x00\x12\x12\n\x08\x61udio_in\x18\x02 \x01(\x0cH\x00\x42\x12\n\x10\x63onverse_request\"\xb5\x03\n\x10\x43onverseResponse\x12#\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.StatusH\x00\x12T\n\nevent_type\x18\x02 \x01(\x0e\x32>.google.assistant.embedded.v1alpha1.ConverseResponse.EventTypeH\x00\x12\x41\n\taudio_out\x18\x03 \x01(\x0b\x32,.google.assistant.embedded.v1alpha1.AudioOutH\x00\x12I\n\rdevice_action\x18\t \x01(\x0b\x32\x30.google.assistant.embedded.v1alpha1.DeviceActionH\x00\x12\x44\n\x06result\x18\x05 \x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.ConverseResultH\x00\"=\n\tEventType\x12\x1a\n\x16\x45VENT_TYPE_UNSPECIFIED\x10\x00\x12\x14\n\x10\x45ND_OF_UTTERANCE\x10\x01\x42\x13\n\x11\x63onverse_response\"!\n\x0c\x44\x65viceConfig\x12\x11\n\tdevice_id\x18\x01 \x01(\t\"+\n\x0c\x44\x65viceAction\x12\x1b\n\x13\x64\x65vice_request_json\x18\x02 \x01(\t2\x8e\x01\n\x11\x45mbeddedAssistant\x12y\n\x08\x43onverse\x12\x33.google.assistant.embedded.v1alpha1.ConverseRequest\x1a\x34.google.assistant.embedded.v1alpha1.ConverseResponse(\x01\x30\x01\x42\x86\x01\n&com.google.assistant.embedded.v1alpha1B\x0e\x41ssistantProtoP\x01ZJgoogle.golang.org/genproto/googleapis/assistant/embedded/v1alpha1;embeddedb\x06proto3') + serialized_pb=_b('\n;google/assistant/embedded/v1alpha1/embedded_assistant.proto\x12\"google.assistant.embedded.v1alpha1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/rpc/status.proto\"\xbe\x02\n\x0e\x43onverseConfig\x12J\n\x0f\x61udio_in_config\x18\x01 \x01(\x0b\x32\x31.google.assistant.embedded.v1alpha1.AudioInConfig\x12L\n\x10\x61udio_out_config\x18\x02 \x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.AudioOutConfig\x12I\n\x0e\x63onverse_state\x18\x03 \x01(\x0b\x32\x31.google.assistant.embedded.v1alpha1.ConverseState\x12G\n\rdevice_config\x18\x04 \x01(\x0b\x32\x30.google.assistant.embedded.v1alpha1.DeviceConfig\"\xb6\x01\n\rAudioInConfig\x12L\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32:.google.assistant.embedded.v1alpha1.AudioInConfig.Encoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\"<\n\x08\x45ncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\"\xe3\x01\n\x0e\x41udioOutConfig\x12M\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32;.google.assistant.embedded.v1alpha1.AudioOutConfig.Encoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x19\n\x11volume_percentage\x18\x03 \x01(\x05\"L\n\x08\x45ncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x07\n\x03MP3\x10\x02\x12\x0f\n\x0bOPUS_IN_OGG\x10\x03\"+\n\rConverseState\x12\x1a\n\x12\x63onversation_state\x18\x01 \x01(\x0c\"\x1e\n\x08\x41udioOut\x12\x12\n\naudio_data\x18\x01 \x01(\x0c\"\xbd\x02\n\x0e\x43onverseResult\x12\x1b\n\x13spoken_request_text\x18\x01 \x01(\t\x12\x1c\n\x14spoken_response_text\x18\x02 \x01(\t\x12\x1a\n\x12\x63onversation_state\x18\x03 \x01(\x0c\x12Z\n\x0fmicrophone_mode\x18\x04 \x01(\x0e\x32\x41.google.assistant.embedded.v1alpha1.ConverseResult.MicrophoneMode\x12\x19\n\x11volume_percentage\x18\x05 \x01(\x05\"]\n\x0eMicrophoneMode\x12\x1f\n\x1bMICROPHONE_MODE_UNSPECIFIED\x10\x00\x12\x14\n\x10\x43LOSE_MICROPHONE\x10\x01\x12\x14\n\x10\x44IALOG_FOLLOW_ON\x10\x02\"\x7f\n\x0f\x43onverseRequest\x12\x44\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.ConverseConfigH\x00\x12\x12\n\x08\x61udio_in\x18\x02 \x01(\x0cH\x00\x42\x12\n\x10\x63onverse_request\"\xb5\x03\n\x10\x43onverseResponse\x12#\n\x05\x65rror\x18\x01 
\x01(\x0b\x32\x12.google.rpc.StatusH\x00\x12T\n\nevent_type\x18\x02 \x01(\x0e\x32>.google.assistant.embedded.v1alpha1.ConverseResponse.EventTypeH\x00\x12\x41\n\taudio_out\x18\x03 \x01(\x0b\x32,.google.assistant.embedded.v1alpha1.AudioOutH\x00\x12I\n\rdevice_action\x18\t \x01(\x0b\x32\x30.google.assistant.embedded.v1alpha1.DeviceActionH\x00\x12\x44\n\x06result\x18\x05 \x01(\x0b\x32\x32.google.assistant.embedded.v1alpha1.ConverseResultH\x00\"=\n\tEventType\x12\x1a\n\x16\x45VENT_TYPE_UNSPECIFIED\x10\x00\x12\x14\n\x10\x45ND_OF_UTTERANCE\x10\x01\x42\x13\n\x11\x63onverse_response\":\n\x0c\x44\x65viceConfig\x12\x11\n\tdevice_id\x18\x01 \x01(\t\x12\x17\n\x0f\x64\x65vice_model_id\x18\x03 \x01(\t\"+\n\x0c\x44\x65viceAction\x12\x1b\n\x13\x64\x65vice_request_json\x18\x02 \x01(\t2\x8e\x01\n\x11\x45mbeddedAssistant\x12y\n\x08\x43onverse\x12\x33.google.assistant.embedded.v1alpha1.ConverseRequest\x1a\x34.google.assistant.embedded.v1alpha1.ConverseResponse(\x01\x30\x01\x42\x86\x01\n&com.google.assistant.embedded.v1alpha1B\x0e\x41ssistantProtoP\x01ZJgoogle.golang.org/genproto/googleapis/assistant/embedded/v1alpha1;embeddedb\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) @@ -509,6 +509,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='device_model_id', full_name='google.assistant.embedded.v1alpha1.DeviceConfig.device_model_id', index=1, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -522,7 +529,7 @@ oneofs=[ ], serialized_start=1856, - serialized_end=1889, + serialized_end=1914, ) @@ -552,8 +559,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1891, - serialized_end=1934, + serialized_start=1916, + serialized_end=1959, ) _CONVERSECONFIG.fields_by_name['audio_in_config'].message_type = _AUDIOINCONFIG @@ -679,245 +686,28 @@ DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n&com.google.assistant.embedded.v1alpha1B\016AssistantProtoP\001ZJgoogle.golang.org/genproto/googleapis/assistant/embedded/v1alpha1;embedded')) -try: - # THESE ELEMENTS WILL BE DEPRECATED. - # Please use the generated *_pb2_grpc.py files instead. - import grpc - from grpc.beta import implementations as beta_implementations - from grpc.beta import interfaces as beta_interfaces - from grpc.framework.common import cardinality - from grpc.framework.interfaces.face import utilities as face_utilities - - - class EmbeddedAssistantStub(object): - """Service that implements Google Assistant API. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.Converse = channel.stream_stream( - '/google.assistant.embedded.v1alpha1.EmbeddedAssistant/Converse', - request_serializer=ConverseRequest.SerializeToString, - response_deserializer=ConverseResponse.FromString, - ) - - - class EmbeddedAssistantServicer(object): - """Service that implements Google Assistant API. - """ - - def Converse(self, request_iterator, context): - """Initiates or continues a conversation with the embedded assistant service. - Each call performs one round-trip, sending an audio request to the service - and receiving the audio response. 
Uses bidirectional streaming to receive - results, such as the `END_OF_UTTERANCE` event, while sending audio. - - A conversation is one or more gRPC connections, each consisting of several - streamed requests and responses. - For example, the user says *Add to my shopping list* and the assistant - responds *What do you want to add?*. The sequence of streamed requests and - responses in the first gRPC message could be: - - * ConverseRequest.config - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseResponse.event_type.END_OF_UTTERANCE - * ConverseResponse.result.microphone_mode.DIALOG_FOLLOW_ON - * ConverseResponse.audio_out - * ConverseResponse.audio_out - * ConverseResponse.audio_out - - - The user then says *bagels* and the assistant responds - *OK, I've added bagels to your shopping list*. This is sent as another gRPC - connection call to the `Converse` method, again with streamed requests and - responses, such as: - - * ConverseRequest.config - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseResponse.event_type.END_OF_UTTERANCE - * ConverseResponse.result.microphone_mode.CLOSE_MICROPHONE - * ConverseResponse.audio_out - * ConverseResponse.audio_out - * ConverseResponse.audio_out - * ConverseResponse.audio_out - - Although the precise order of responses is not guaranteed, sequential - ConverseResponse.audio_out messages will always contain sequential portions - of audio. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - - def add_EmbeddedAssistantServicer_to_server(servicer, server): - rpc_method_handlers = { - 'Converse': grpc.stream_stream_rpc_method_handler( - servicer.Converse, - request_deserializer=ConverseRequest.FromString, - response_serializer=ConverseResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.assistant.embedded.v1alpha1.EmbeddedAssistant', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - - class BetaEmbeddedAssistantServicer(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """Service that implements Google Assistant API. - """ - def Converse(self, request_iterator, context): - """Initiates or continues a conversation with the embedded assistant service. - Each call performs one round-trip, sending an audio request to the service - and receiving the audio response. Uses bidirectional streaming to receive - results, such as the `END_OF_UTTERANCE` event, while sending audio. - - A conversation is one or more gRPC connections, each consisting of several - streamed requests and responses. - For example, the user says *Add to my shopping list* and the assistant - responds *What do you want to add?*. 
The sequence of streamed requests and - responses in the first gRPC message could be: - - * ConverseRequest.config - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseResponse.event_type.END_OF_UTTERANCE - * ConverseResponse.result.microphone_mode.DIALOG_FOLLOW_ON - * ConverseResponse.audio_out - * ConverseResponse.audio_out - * ConverseResponse.audio_out - - - The user then says *bagels* and the assistant responds - *OK, I've added bagels to your shopping list*. This is sent as another gRPC - connection call to the `Converse` method, again with streamed requests and - responses, such as: - - * ConverseRequest.config - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseResponse.event_type.END_OF_UTTERANCE - * ConverseResponse.result.microphone_mode.CLOSE_MICROPHONE - * ConverseResponse.audio_out - * ConverseResponse.audio_out - * ConverseResponse.audio_out - * ConverseResponse.audio_out - - Although the precise order of responses is not guaranteed, sequential - ConverseResponse.audio_out messages will always contain sequential portions - of audio. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - - - class BetaEmbeddedAssistantStub(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """Service that implements Google Assistant API. - """ - def Converse(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): - """Initiates or continues a conversation with the embedded assistant service. - Each call performs one round-trip, sending an audio request to the service - and receiving the audio response. Uses bidirectional streaming to receive - results, such as the `END_OF_UTTERANCE` event, while sending audio. - - A conversation is one or more gRPC connections, each consisting of several - streamed requests and responses. - For example, the user says *Add to my shopping list* and the assistant - responds *What do you want to add?*. The sequence of streamed requests and - responses in the first gRPC message could be: - - * ConverseRequest.config - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseResponse.event_type.END_OF_UTTERANCE - * ConverseResponse.result.microphone_mode.DIALOG_FOLLOW_ON - * ConverseResponse.audio_out - * ConverseResponse.audio_out - * ConverseResponse.audio_out - - - The user then says *bagels* and the assistant responds - *OK, I've added bagels to your shopping list*. This is sent as another gRPC - connection call to the `Converse` method, again with streamed requests and - responses, such as: - - * ConverseRequest.config - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseRequest.audio_in - * ConverseResponse.event_type.END_OF_UTTERANCE - * ConverseResponse.result.microphone_mode.CLOSE_MICROPHONE - * ConverseResponse.audio_out - * ConverseResponse.audio_out - * ConverseResponse.audio_out - * ConverseResponse.audio_out - - Although the precise order of responses is not guaranteed, sequential - ConverseResponse.audio_out messages will always contain sequential portions - of audio. 
- """ - raise NotImplementedError() - - - def beta_create_EmbeddedAssistant_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_deserializers = { - ('google.assistant.embedded.v1alpha1.EmbeddedAssistant', 'Converse'): ConverseRequest.FromString, - } - response_serializers = { - ('google.assistant.embedded.v1alpha1.EmbeddedAssistant', 'Converse'): ConverseResponse.SerializeToString, - } - method_implementations = { - ('google.assistant.embedded.v1alpha1.EmbeddedAssistant', 'Converse'): face_utilities.stream_stream_inline(servicer.Converse), - } - server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) - return beta_implementations.server(method_implementations, options=server_options) - - - def beta_create_EmbeddedAssistant_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_serializers = { - ('google.assistant.embedded.v1alpha1.EmbeddedAssistant', 'Converse'): ConverseRequest.SerializeToString, - } - response_deserializers = { - ('google.assistant.embedded.v1alpha1.EmbeddedAssistant', 'Converse'): ConverseResponse.FromString, - } - cardinalities = { - 'Converse': cardinality.Cardinality.STREAM_STREAM, - } - stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) - return beta_implementations.dynamic_stub(channel, 'google.assistant.embedded.v1alpha1.EmbeddedAssistant', cardinalities, options=stub_options) -except ImportError: - pass + +_EMBEDDEDASSISTANT = _descriptor.ServiceDescriptor( + name='EmbeddedAssistant', + full_name='google.assistant.embedded.v1alpha1.EmbeddedAssistant', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=1962, + serialized_end=2104, + methods=[ + _descriptor.MethodDescriptor( + name='Converse', + full_name='google.assistant.embedded.v1alpha1.EmbeddedAssistant.Converse', + index=0, + containing_service=None, + input_type=_CONVERSEREQUEST, + output_type=_CONVERSERESPONSE, + options=None, + ), +]) +_sym_db.RegisterServiceDescriptor(_EMBEDDEDASSISTANT) + +DESCRIPTOR.services_by_name['EmbeddedAssistant'] = _EMBEDDEDASSISTANT + # @@protoc_insertion_point(module_scope) diff --git a/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2_grpc.py b/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2_grpc.py index 7e424e8..4a9aff7 100644 --- a/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2_grpc.py +++ b/google-assistant-grpc/google/assistant/embedded/v1alpha1/embedded_assistant_pb2_grpc.py @@ -43,6 +43,7 @@ def Converse(self, request_iterator, context): * ConverseRequest.audio_in * 
ConverseRequest.audio_in * ConverseResponse.event_type.END_OF_UTTERANCE + * ConverseResponse.result.spoken_request_text "add to my shopping list" * ConverseResponse.result.microphone_mode.DIALOG_FOLLOW_ON * ConverseResponse.audio_out * ConverseResponse.audio_out diff --git a/google-assistant-grpc/proto/google/assistant/embedded/v1alpha1/embedded_assistant.proto b/google-assistant-grpc/proto/google/assistant/embedded/v1alpha1/embedded_assistant.proto index 08b487a..f556c9a 100755 --- a/google-assistant-grpc/proto/google/assistant/embedded/v1alpha1/embedded_assistant.proto +++ b/google-assistant-grpc/proto/google/assistant/embedded/v1alpha1/embedded_assistant.proto @@ -44,6 +44,7 @@ service EmbeddedAssistant { // * ConverseRequest.audio_in // * ConverseRequest.audio_in // * ConverseResponse.event_type.END_OF_UTTERANCE + // * ConverseResponse.result.spoken_request_text "add to my shopping list" // * ConverseResponse.result.microphone_mode.DIALOG_FOLLOW_ON // * ConverseResponse.audio_out // * ConverseResponse.audio_out @@ -75,7 +76,7 @@ service EmbeddedAssistant { // Specifies how to process the `ConverseRequest` messages. message ConverseConfig { // Specifies how to process the subsequent incoming audio. Required if - // audio_in bytes are to be provided. + // [ConverseRequest.audio_in][google.assistant.embedded.v1alpha1.ConverseRequest.audio_in] bytes will be provided in subsequent requests. AudioInConfig audio_in_config = 1; // *Required* Specifies how to format the audio that will be returned. @@ -90,7 +91,7 @@ message ConverseConfig { // Specifies how to process the `audio_in` data that will be provided in // subsequent requests. For recommended settings, see the Google Assistant SDK -// [best practices](https://developers.google.com/assistant/sdk/best-practices/audio). +// [best practices](https://developers.google.com/assistant/sdk/develop/grpc/best-practices/audio). message AudioInConfig { // Audio encoding of the data sent in the audio message. // Audio must be one-channel (mono). The only language supported is "en-US". @@ -160,11 +161,11 @@ message AudioOutConfig { // Provides information about the current dialog state. message ConverseState { - // *Required* The `conversation_state` value returned in the prior - // `ConverseResponse`. Omit (do not set the field) if there was no prior - // `ConverseResponse`. If there was a prior `ConverseResponse`, do not omit - // this field; doing so will end that conversation (and this new request will - // start a new conversation). + // *Required* This field must always be set to the + // [ConverseResult.conversation_state][google.assistant.embedded.v1alpha1.ConverseResult.conversation_state] value that was returned in the prior + // `Converse` rpc. It should only be omitted (field not set) if there was no + // prior `Converse` rpc because this is the first `Converse` rpc made by this + // device after it was first setup and/or factory-default-reset. bytes conversation_state = 1; } @@ -177,11 +178,8 @@ message AudioOut { bytes audio_data = 1; } -// The semantic result for the user's spoken query. Multiple of these messages -// could be received, for example one containing the recognized transcript in -// spoken_request_text followed by one containing the semantics of the response, -// i.e. containing the relevant data among conversation_state, microphone_mode, -// and volume_percentage. +// The dialog state resulting from the user's query. Multiple of these messages +// may be received. 
message ConverseResult { // Possible states of the microphone after a `Converse` RPC completes. enum MicrophoneMode { @@ -201,15 +199,17 @@ message ConverseResult { // *Output-only* The recognized transcript of what the user said. string spoken_request_text = 1; - // *Output-only* The text of the assistant's spoken response. This is only - // returned for an IFTTT action. + // *Output-only* The speech transcript of the Assistant's spoken response, + // i.e. the written form of what is returned in ConverseResponse.audio_out. + // This is only returned for an IFTTT action. string spoken_response_text = 2; - // *Output-only* State information for subsequent `ConverseRequest`. This + // *Output-only* State information for subsequent `Converse` rpc. This // value should be saved in the client and returned in the - // `conversation_state` with the next `ConverseRequest`. (The client does not - // need to interpret or otherwise use this value.) There is no need to save - // this information across device restarts. + // [ConverseState.conversation_state][google.assistant.embedded.v1alpha1.ConverseState.conversation_state] field with the next `Converse` rpc. + // (The client does not need to interpret or otherwise use this value.) This + // information should be saved across device reboots. However, this value + // should be cleared (not saved in the client) during a factory-default-reset. bytes conversation_state = 3; // *Output-only* Specifies the mode of the microphone after this `Converse` @@ -288,7 +288,7 @@ message ConverseResponse { // appropriate payloads and semantic parsing. DeviceAction device_action = 9; - // *Output-only* The final semantic result for the user's spoken query. + // *Output-only* Contains output related to the user's query. ConverseResult result = 5; } } @@ -296,23 +296,30 @@ message ConverseResponse { // The identification information for devices integrated with the Assistant. // These fields should be populated for any queries sent from 3P devices. message DeviceConfig { - // *Required* Unique identifier for the device. Example: DBCDW098234. This - // MUST match the device_id returned from device registration. This device_id - // is used matched against the user's registered devices to lookup the - // supported traits and capabilities of this device. + // *Required* Unique identifier for the device. The id length must be 128 + // characters or less. Example: DBCDW098234. This MUST match the device_id + // returned from device registration. This device_id is used to match against + // the user's registered devices to lookup the supported traits and + // capabilities of this device. This information should not change across + // device reboots. However, it should not be saved across + // factory-default-resets. string device_id = 1; + + // *Optional* The model of this device as registered in the Device Model + // Registration API. This is only required for syndication partners. + string device_model_id = 3; } -// The response returned to the device if any 3P Custom Device Grammar is -// triggered. The 3P Custom Device Grammar is enabled through the specific +// The response returned to the device if any Custom Device Grammar is +// triggered. The Custom Device Grammar is enabled through the specific // [DeviceConfig](google.assistant.embedded.v1alpha1.DeviceConfig) provided by -// this device, and should be handled appropriately. For example, a 3P device +// this device, and should be handled appropriately. 
For example, a device // which supports the customized query "do a dance" would receive a DeviceAction // with a JSON payload containing the semantics of the request. message DeviceAction { // JSON containing the device control response generated from the triggered 3P - // Custom Device Grammar. The format is given by the [action.devices.EXECUTE]( - // https://developers.google.com/actions/smarthome/create-app#actiondevicesexecute) + // Custom Device Grammar. The format is given by the + // [action.devices.EXECUTE](https://developers.google.com/actions/smarthome/create-app#actiondevicesexecute) // request type. string device_request_json = 2; } diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py b/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py index 2bd0813..7b59a4f 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py @@ -25,7 +25,10 @@ import google.auth.transport.requests import google.oauth2.credentials -from google.assistant.embedded.v1alpha1 import embedded_assistant_pb2 +from google.assistant.embedded.v1alpha1 import ( + embedded_assistant_pb2, + embedded_assistant_pb2_grpc +) from google.rpc import code_pb2 from tenacity import retry, stop_after_attempt, retry_if_exception @@ -75,7 +78,9 @@ def __init__(self, device_id, conversation_stream, channel, deadline_sec, self.conversation_state = None # Create Google Assistant API gRPC client. - self.assistant = embedded_assistant_pb2.EmbeddedAssistantStub(channel) + self.assistant = embedded_assistant_pb2_grpc.EmbeddedAssistantStub( + channel + ) self.deadline = deadline_sec self.device_handler = device_handler From 2d7f00931f0439ead5cbe61e9c94904be44a481a Mon Sep 17 00:00:00 2001 From: Johan Euphrosine Date: Tue, 7 Nov 2017 17:14:27 +0900 Subject: [PATCH 11/35] google-assistant-sdk/pushtotalk: add device-model-id flag Bug: 67009650 Change-Id: I97d1ee58147ebcad3e68ec3bdf37c2d9b73b09d8 --- .../assistant/grpc/pushtotalk.py | 20 ++++++++++++------- google-assistant-sdk/nox.py | 1 + 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py b/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py index 7b59a4f..94b06cd 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py @@ -55,7 +55,8 @@ class SampleAssistant(object): """Sample Assistant that supports conversations and device actions. Args: - device_id: identifier of the device. + device_model_id: identifier of the device model. + device_id: identifier of the registered device instance. conversation_stream(ConversationStream): audio stream for recording query and playing back assistant answer. channel: authorized gRPC channel for connection to the @@ -64,8 +65,9 @@ class SampleAssistant(object): device_handler: callback for device actions. 
""" - def __init__(self, device_id, conversation_stream, channel, deadline_sec, - device_handler): + def __init__(self, device_model_id, device_id, conversation_stream, + channel, deadline_sec, device_handler): + self.device_model_id = device_model_id self.device_id = device_id self.conversation_stream = conversation_stream @@ -192,6 +194,7 @@ def gen_converse_requests(self): ), converse_state=converse_state, device_config=embedded_assistant_pb2.DeviceConfig( + device_model_id=self.device_model_id, device_id=self.device_id, ) ) @@ -212,9 +215,12 @@ def gen_converse_requests(self): default=os.path.join(click.get_app_dir('google-oauthlib-tool'), 'credentials.json'), help='Path to read OAuth2 credentials.') +@click.option('--device-model-id', required=True, + metavar='', + help='Unique device model identifier.') @click.option('--device-id', required=True, - metavar='', show_default=True, - help='Unique device instance identifier.') + metavar='', + help='Unique registered device instance identifier.') @click.option('--verbose', '-v', is_flag=True, default=False, help='Verbose logging.') @click.option('--input-audio-file', '-i', @@ -252,7 +258,7 @@ def gen_converse_requests(self): help='gRPC deadline in seconds') @click.option('--once', default=False, is_flag=True, help='Force termination after a single conversation.') -def main(api_endpoint, credentials, device_id, verbose, +def main(api_endpoint, credentials, device_model_id, device_id, verbose, input_audio_file, output_audio_file, audio_sample_rate, audio_sample_width, audio_iter_size, audio_block_size, audio_flush_size, @@ -342,7 +348,7 @@ def onoff(on): else: logging.info('Turning device off') - with SampleAssistant(device_id, conversation_stream, + with SampleAssistant(device_model_id, device_id, conversation_stream, grpc_channel, grpc_deadline, device_handler) as assistant: # If file arguments are supplied: diff --git a/google-assistant-sdk/nox.py b/google-assistant-sdk/nox.py index d825107..94a7b87 100644 --- a/google-assistant-sdk/nox.py +++ b/google-assistant-sdk/nox.py @@ -68,6 +68,7 @@ def migrate_credentials(old, new): temp_dir = tempfile.mkdtemp() audio_out_file = os.path.join(temp_dir, 'out.raw') session.run('python', '-m', 'googlesamples.assistant.grpc.pushtotalk', + '--device-model-id', 'test-device-model', '--device-id', 'test-device', '-i', 'tests/data/whattimeisit.riff', '-o', audio_out_file) From 8e1c653984422a9f9d7387aee71bd655a2b6e95c Mon Sep 17 00:00:00 2001 From: Nick Felker Date: Tue, 7 Nov 2017 11:33:51 -0800 Subject: [PATCH 12/35] Replaces raw JSON output with a more user-friendly output Developers can still get verbose JSON printing with `--verbose` Bug: 67062248 Change-Id: I804f7e47febfb10b4e6caa39ee3a972f4f874cdb --- .../assistant/grpc/devicetool.py | 80 +++++++++++++++---- 1 file changed, 63 insertions(+), 17 deletions(-) diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py index 0e22d8b..8c34a5d 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py @@ -15,6 +15,7 @@ """Sample that implements device registration for the Google Assistant API.""" import json +import logging import os import click @@ -40,6 +41,27 @@ def failed_request_exception(message, r): r.text)) +# Prints out a device model in the terminal by parsing dict +def pretty_print_model(devicemodel): + PRETTY_PRINT_MODEL = """Device Model Id: %(deviceModelId)s + Project Id: 
From 8e1c653984422a9f9d7387aee71bd655a2b6e95c Mon Sep 17 00:00:00 2001
From: Nick Felker
Date: Tue, 7 Nov 2017 11:33:51 -0800
Subject: [PATCH 12/35] Replaces raw JSON output with a more user-friendly output

Developers can still get verbose JSON printing with `--verbose`

Bug: 67062248
Change-Id: I804f7e47febfb10b4e6caa39ee3a972f4f874cdb
---
 .../assistant/grpc/devicetool.py | 80 +++++++++++++++----
 1 file changed, 63 insertions(+), 17 deletions(-)

diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py
index 0e22d8b..8c34a5d 100644
--- a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py
+++ b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py
@@ -15,6 +15,7 @@
 """Sample that implements device registration for the Google Assistant API."""

 import json
+import logging
 import os

 import click
@@ -40,6 +41,27 @@ def failed_request_exception(message, r):
                                                 r.text))


+# Prints out a device model in the terminal by parsing dict
+def pretty_print_model(devicemodel):
+    PRETTY_PRINT_MODEL = """Device Model Id: %(deviceModelId)s
+        Project Id: %(projectId)s
+        Device Type: %(deviceType)s"""
+    logging.info(PRETTY_PRINT_MODEL % devicemodel)
+    for trait in devicemodel['traits']:
+        logging.info('    Trait %s' % trait)
+    logging.info('')  # Newline
+
+
+# Prints out a device instance in the terminal by parsing dict
+def pretty_print_device(device):
+    logging.info('Device Instance Id: %s' % device['id'])
+    if 'nickname' in device:
+        logging.info('    Nickname: %s' % device['nickname'])
+    if 'modelId' in device:
+        logging.info('    Model: %s' % device['modelId'])
+    logging.info('')  # Newline
+
+
 @click.group()
 @click.option('--project',
               help='Enter the Google Developer Project ID that you want to '
@@ -56,6 +78,8 @@ def failed_request_exception(message, r):
                    'tool will look for this file in the current directory (by '
                    'searching for a file named after the client_id stored in the '
                    'credentials file).')
+@click.option('--verbose', flag_value=True,
+              help='Shows detailed JSON response')
 @click.option('--api-endpoint', default='embeddedassistant.googleapis.com',
               show_default=True,
               help='Hostname for the Google Assistant API. Do not use this '
@@ -70,7 +94,7 @@ def failed_request_exception(message, r):
                    'API. You can use this flag if the credentials were generated '
                    'in a location that is different than the default.')
 @click.pass_context
-def cli(ctx, project, client_secret, api_endpoint, credentials):
+def cli(ctx, project, client_secret, verbose, api_endpoint, credentials):
     try:
         with open(credentials, 'r') as f:
             c = google.oauth2.credentials.Credentials(token=None,
@@ -90,16 +114,18 @@ def cli(ctx, project, client_secret, api_endpoint, credentials):
                 project = secret['installed']['project_id']
     except Exception as e:
         raise click.ClickException('Error loading client secret: %s.\n'
-                                   'Run the register tool'
+                                   'Run the register tool '
                                    'with --client-secret '
                                    'or --project option.\n'
-                                   'Or copy the %s file'
+                                   'Or copy the %s file '
                                    'in the current directory.'
                                   % (e, client_secret))
     ctx.obj['SESSION'] = google.auth.transport.requests.AuthorizedSession(c)
     ctx.obj['API_URL'] = ('https://%s/v1alpha2/projects/%s' %
                           (api_endpoint, project))
     ctx.obj['PROJECT_ID'] = project
+    logging.basicConfig(format='',
+                        level=logging.DEBUG if verbose else logging.INFO)


 @cli.command()
@@ -204,18 +230,20 @@ def register_model(ctx, model, type, trait,
         payload.setdefault('manifest', {})['productName'] = product_name
     if description:
         payload.setdefault('manifest', {})['deviceDescription'] = description
+    logging.debug(json.dumps(payload))
     r = session.get(model_url)
+    logging.debug(r.text)
     if r.status_code == 200:
-        click.echo('updating existing device model: %s' % model)
+        click.echo('Updating existing device model: %s' % model)
         r = session.put(model_url, data=json.dumps(payload))
     elif r.status_code in (400, 404):
-        click.echo('creating new device model')
+        click.echo('Creating new device model')
         r = session.post(model_base_url, data=json.dumps(payload))
     else:
-        raise failed_request_exception('failed to check existing model', r)
+        raise failed_request_exception('Unknown error occurred', r)
     if r.status_code != 200:
-        raise failed_request_exception('failed to register model', r)
-    click.echo(r.text)
+        raise failed_request_exception('Failed to register model', r)
+    click.echo('Model %s successfully registered' % model)


 @cli.command('register-device')
@@ -248,19 +276,21 @@ def register_device(ctx, device, model, nickname):
     if nickname:
         payload['nickname'] = nickname

+    logging.debug(json.dumps(payload))
     r = session.get(device_url)
     if r.status_code == 200:
-        click.echo('updating existing device: %s' % device)
+        click.echo('Updating existing device: %s' % device)
         session.delete(device_url)
         r = session.post(device_base_url, data=json.dumps(payload))
     elif r.status_code in (400, 404):
-        click.echo('creating new device')
+        click.echo('Creating new device')
         r = session.post(device_base_url, data=json.dumps(payload))
     else:
-        raise failed_request_exception('failed to check existing device', r)
+        raise failed_request_exception('Failed to check existing device', r)
     if r.status_code != 200:
-        raise failed_request_exception('failed to register device', r)
-    click.echo(r.text)
+        raise failed_request_exception('Failed to register device', r)
+    click.echo('Device instance %s successfully registered' % device)
+    logging.debug(r.text)


 @cli.command()
@@ -275,11 +305,18 @@ def get(ctx, resource, id):
     instance.
     """
     session = ctx.obj['SESSION']
+
     url = '/'.join([ctx.obj['API_URL'], resource, id])
     r = session.get(url)
     if r.status_code != 200:
-        raise failed_request_exception('failed to get resource', r)
-    click.echo(r.text)
+        raise failed_request_exception('Failed to get resource', r)
+
+    response = json.loads(r.text)
+    if resource == 'deviceModels':
+        pretty_print_model(response)
+    elif resource == 'devices':
+        pretty_print_device(response)
+    logging.debug(r.text)


 @cli.command()
@@ -292,11 +329,20 @@ def list(ctx, resource):
     devicetool's --project flag.
     """
     session = ctx.obj['SESSION']
+
     url = '/'.join([ctx.obj['API_URL'], resource])
     r = session.get(url)
     if r.status_code != 200:
-        raise failed_request_exception('failed to list resources', r)
-    click.echo(r.text)
+        raise failed_request_exception('Failed to list resources', r)
+
+    response = json.loads(r.text)
+    logging.debug(r.text)
+    if resource == 'deviceModels':
+        for devicemodel in response['deviceModels']:
+            pretty_print_model(devicemodel)
+    elif resource == 'devices':
+        for device in response['devices']:
+            pretty_print_device(device)


 def main():
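For reference, a rough sketch of what the new pretty-printers log (the dict values are
invented for illustration; pretty_print_model is the helper added in the patched
devicetool.py above):

    import logging
    logging.basicConfig(format='', level=logging.INFO)

    # Invented example of a device model resource returned by the API.
    pretty_print_model({
        'deviceModelId': 'my-model',
        'projectId': 'my-project',
        'deviceType': 'action.devices.types.LIGHT',
        'traits': ['action.devices.traits.OnOff'],
    })
    # Logs roughly:
    # Device Model Id: my-model
    #         Project Id: my-project
    #         Device Type: action.devices.types.LIGHT
    #     Trait action.devices.traits.OnOff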
""" session = ctx.obj['SESSION'] + url = '/'.join([ctx.obj['API_URL'], resource]) r = session.get(url) if r.status_code != 200: - raise failed_request_exception('failed to list resources', r) - click.echo(r.text) + raise failed_request_exception('Failed to list resources', r) + + response = json.loads(r.text) + logging.debug(r.text) + if resource == 'deviceModels': + for devicemodel in response['deviceModels']: + pretty_print_model(devicemodel) + elif resource == 'devices': + for device in response['devices']: + pretty_print_device(device) def main(): From 6955786f2866158f433cd1ad69339a3fb612b4af Mon Sep 17 00:00:00 2001 From: Nick Felker Date: Wed, 8 Nov 2017 14:40:30 -0800 Subject: [PATCH 13/35] Cleans up error messages in devicetool Bug: 69009843 Change-Id: I7b3c6acd34911189df7ece3e4a10febbc4d85435 --- .../googlesamples/assistant/grpc/devicetool.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py index 8c34a5d..45fae06 100644 --- a/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py +++ b/google-assistant-sdk/googlesamples/assistant/grpc/devicetool.py @@ -27,12 +27,8 @@ def failed_request_exception(message, r): """Build ClickException from a failed request.""" try: resp = json.loads(r.text) - message = '%s: %d %s' % (message, - resp['error']['code'], - resp['error']['message']) - if 'details' in resp['error']: - details = '\n'.join(d['detail'] for d in resp['error']['details']) - message += ' ' + details + message = '%s: %d\n%s' % (message, resp['error']['code'], + resp['error']['message']) return click.ClickException(message) except ValueError: # fallback on raw text response if error is not structured. 
From b6a8f3a1995e091c7c64ab5070511ca58f8d6b85 Mon Sep 17 00:00:00 2001
From: Johan Euphrosine
Date: Thu, 9 Nov 2017 14:47:31 +0900
Subject: [PATCH 14/35] google-assistant-sdk: pushtotalk add device registration

Bug: 68943313
Change-Id: I05f130c51b955a73695adac9814255915db3d924
---
 .../assistant/grpc/pushtotalk.py | 62 +++++++++++++++++--
 1 file changed, 56 insertions(+), 6 deletions(-)

diff --git a/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py b/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py
index 94b06cd..691dc04 100644
--- a/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py
+++ b/google-assistant-sdk/googlesamples/assistant/grpc/pushtotalk.py
@@ -17,7 +17,9 @@
 import concurrent.futures
 import json
 import logging
+import os
 import os.path
+import uuid

 import click
 import grpc
@@ -215,12 +217,26 @@ def gen_converse_requests(self):
               default=os.path.join(click.get_app_dir('google-oauthlib-tool'),
                                    'credentials.json'),
               help='Path to read OAuth2 credentials.')
-@click.option('--device-model-id', required=True,
+@click.option('--project',
+              metavar='',
+              help=('Google Developer Project ID used for registration '
+                    'if --device-id is not specified'))
+@click.option('--device-model-id',
               metavar='',
-              help='Unique device model identifier.')
-@click.option('--device-id', required=True,
+              help=(('Unique device model identifier, '
+                     'if not specified, it is read from --device-config')))
+@click.option('--device-id',
               metavar='',
-              help='Unique registered device instance identifier.')
+              help=(('Unique registered device instance identifier, '
+                     'if not specified, it is read from --device-config, '
+                     'if no device_config found: a new device is registered '
+                     'using a unique id and a new device config is saved')))
+@click.option('--device-config', show_default=True,
+              metavar='',
+              default=os.path.join(
+                  click.get_app_dir('googlesamples-assistant'),
+                  'device_config.json'),
+              help='Path to save and restore the device configuration')
 @click.option('--verbose', '-v', is_flag=True, default=False,
               help='Verbose logging.')
 @click.option('--input-audio-file', '-i',
@@ -247,7 +263,7 @@ def gen_converse_requests(self):
               default=audio_helpers.DEFAULT_AUDIO_DEVICE_BLOCK_SIZE,
               metavar='