merge audio context changes from Matthias
timheuer committed Mar 24, 2017
1 parent 05a3cfa commit 50c0c97
Showing 7 changed files with 112 additions and 2 deletions.
19 changes: 19 additions & 0 deletions Alexa.NET/Request/Context.cs
@@ -0,0 +1,19 @@
using Alexa.NET.Request.Type;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

namespace Alexa.NET.Request
{
public class Context
{
[JsonProperty("System")]
public AlexaSystem System { get; set; }

[JsonProperty("AudioPlayer")]
public PlaybackState AudioPlayer { get; set; }

}
}
20 changes: 20 additions & 0 deletions Alexa.NET/Request/Device.cs
@@ -0,0 +1,20 @@
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

namespace Alexa.NET.Request
{
public class Device
{
[JsonProperty("supportedInterfaces")]
public Dictionary<string, object> SupportedInterfaces { get; set; }

public bool IsInterfaceSupported(string interfaceName)
{
var hasInterface = SupportedInterfaces?.ContainsKey(interfaceName);
return (hasInterface.HasValue ? hasInterface.Value : false);
}
}
}
3 changes: 3 additions & 0 deletions Alexa.NET/Request/SkillRequest.cs
@@ -11,6 +11,9 @@ public class SkillRequest
[JsonProperty("session")]
public Session Session { get; set; }

[JsonProperty("context")]
public Context Context { get; set; }

[JsonProperty("request")]
[JsonConverter(typeof(RequestConverter))]
public Type.Request Request { get; set; }
12 changes: 12 additions & 0 deletions Alexa.NET/Request/SupportedInterfaces.cs
@@ -0,0 +1,12 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

namespace Alexa.NET.Request
{
public class SupportedInterfaces
{

}
}
20 changes: 20 additions & 0 deletions Alexa.NET/Request/System.cs
@@ -0,0 +1,20 @@
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

namespace Alexa.NET.Request
{
public class AlexaSystem
{
[JsonProperty("application")]
public Application Application { get; set; }

[JsonProperty("user")]
public User User { get; set; }

[JsonProperty("device")]
public Device Device { get; set; }
}
}
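
With the pieces above in place (`SkillRequest.Context`, `Context.System`, `AlexaSystem.Device`), a handler can check whether the requesting device supports a given interface before sending directives. A minimal usage sketch; the `"AudioPlayer"` key follows Alexa's interface naming and is an assumption not shown in this commit:
```csharp
// given the deserialized SkillRequest passed to the function handler as `input`
var device = input.Context?.System?.Device;

// "AudioPlayer" is the interface name Alexa reports for audio-capable devices (assumed key)
bool canPlayAudio = device != null && device.IsInterfaceSupported("AudioPlayer");

if (!canPlayAudio)
{
    // fall back to a plain voice response instead of sending audio directives
}
```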
2 changes: 1 addition & 1 deletion Alexa.NET/project.json
@@ -1,7 +1,7 @@
{
"name": "Alexa.NET",
"title": "Alexa.NET",
"version": "1.0.0-beta-4",
"version": "1.0.0-beta-5",
"authors": [ "Tim Heuer" ],
"description": "A simple .NET Core library for handling Alexa Skill request/responses.",

38 changes: 37 additions & 1 deletion README.md
@@ -12,7 +12,7 @@
```csharp
public SkillResponse FunctionHandler(SkillRequest input, ILambdaContext context)
{
    // your function logic goes here
}
```
-## Get the request type (Launch, Intent, etc)
+## Get the request type (Launch, Intent, Audio, etc)
You will most likely want to check the type of request to know whether it was the default launch, an intent, or an audio request.
```csharp
// check what type of a request it is like an IntentRequest or a LaunchRequest
```
@@ -26,6 +26,10 @@
```csharp
else if (requestType == typeof(Alexa.NET.Request.Type.LaunchRequest))
{
// default launch path executed
}
else if (requestType == typeof(AudioPlayerRequest))
{
// do some audio response stuff
}
```

## Get the intent and look at specifics
@@ -42,7 +46,24 @@
```csharp
if (intentRequest.Intent.Name.Equals("MyIntentName"))
{
}
```
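
A minimal sketch of the full flow for this section: the `IntentRequest` cast mirrors the request-type check above, while the `Slots` dictionary access, the `Value` property, and the `"FirstSlot"` name are assumptions used for illustration:
```csharp
// cast the incoming request to an IntentRequest to read its details
var intentRequest = input.Request as IntentRequest;

if (intentRequest.Intent.Name.Equals("MyIntentName"))
{
    // hypothetical slot name - read a value the user supplied with the intent
    var slotValue = intentRequest.Intent.Slots["FirstSlot"].Value;
}
```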

## Get an audio request and determine the action

Once you know it is an AudioPlayerRequest, you have to determine which event occurred (playback started, finished, stopped, failed, nearly finished) and respond accordingly.

```csharp
// do some audio response stuff
var audioRequest = input.Request as AudioPlayerRequest;

// these are events sent when the audio state has changed on the device
// determine what exactly happened
if (audioRequest.AudioRequestType == AudioRequestType.PlaybackNearlyFinished)
{
// queue up another audio file
}
```
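
One way to act on that event is to queue the next stream. This sketch assumes `PlayBehavior` includes an `Enqueue` value and that `ResponseBuilder.AudioPlayerPlay` takes the same URL/token arguments shown in the play example further down:
```csharp
if (audioRequest.AudioRequestType == AudioRequestType.PlaybackNearlyFinished)
{
    // hypothetical next track - enqueue it so playback continues seamlessly
    string nextUrl = "http://mydomain.com/next-track.mp3";
    string nextToken = "a token to describe the next track";

    return ResponseBuilder.AudioPlayerPlay(PlayBehavior.Enqueue, nextUrl, nextToken);
}
```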

## Build a simple voice response

There are various types of responses you can build, and this library provides helper functions to build them. A simple example of having Alexa tell the user something using SSML might look like this:
```csharp
// build the speech response
```
@@ -87,7 +108,22 @@
```csharp
var finalResponse = ResponseBuilder.Ask(speech, repromptBody);
return finalResponse;
```
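
A minimal sketch of the simple tell case described above, using the `ResponseBuilder.Tell` helper named later in this README and an `SsmlOutputSpeech` type (the latter is an assumption):
```csharp
// build an SSML speech response and have Alexa speak it, ending the session
var speech = new SsmlOutputSpeech();
speech.Ssml = "<speak>Hello from my skill.</speak>";

var response = ResponseBuilder.Tell(speech);
return response;
```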

## Play an audio file

If your skill is registered as an audio player, you can send directives (instructions to play, enqueue, or stop an audio stream).

```csharp
// create the speech response - you most likely will still have this
string audioUrl = "http://mydomain.com/myaudiofile.mp3";
string audioToken = "a token to describe the audio file";

var audioResponse = ResponseBuilder.AudioPlayerPlay(PlayBehavior.ReplaceAll, audioUrl, audioToken);

return audioResponse;
```
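
Stopping playback works the same way through a directive; this sketch assumes the library also exposes a `ResponseBuilder.AudioPlayerStop` helper, which is not shown in this commit:
```csharp
// stop the currently playing stream, e.g. in response to a stop or cancel intent
var stopResponse = ResponseBuilder.AudioPlayerStop();
return stopResponse;
```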

## Build a response without using helpers

If you do not want to use the helper Tell/Ask functions for the simple structure, you can build up the response manually using the ```Response``` and some ```IOutputSpeech``` objects.
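A hedged sketch of what that might look like; the `SsmlOutputSpeech` type and the property names on `Response` and `SkillResponse` (`OutputSpeech`, `ShouldEndSession`, `Response`, `Version`) are assumptions, not confirmed by this commit:
```csharp
// create the speech output manually (an IOutputSpeech implementation, name assumed)
var speech = new SsmlOutputSpeech();
speech.Ssml = "<speak>Hello from my skill.</speak>";

// assemble the response body yourself instead of using Tell/Ask (property names assumed)
var body = new Response();
body.OutputSpeech = speech;
body.ShouldEndSession = true;

// wrap it in the SkillResponse the function handler returns
var skillResponse = new SkillResponse();
skillResponse.Response = body;
skillResponse.Version = "1.0";

return skillResponse;
```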
