
Context

Context is a container used to communicate the state of your client's components to AVS. Context should reflect the state of those components immediately before the event is sent. For example, if your client is playing an audio stream and a user interrupts playback to make a speech request, the Recognize event sent to AVS should include Context indicating that the AudioPlayer interface's playerActivity was PLAYING.

The following example illustrates a SpeechRecognizer.Recognize event with all context objects included.

{
    "context": [
        {
            "header": {
                "namespace": "AudioPlayer",
                "name": "PlaybackState"
            },
            "payload": {
                "token": "{{STRING}}",
                "offsetInMilliseconds": {{LONG}},
                "playerActivity": "{{STRING}}"
            }
        },
        {
            "header": {
                "namespace": "SpeechRecognizer",
                "name": "RecognizerState"
            },
            "payload": {
                "wakeword": "ALEXA"
            }
        },
        {
            "header": {
                "namespace": "Notifications",
                "name": "IndicatorState"
            },
            "payload": {
                "isEnabled": {{BOOLEAN}},
                "isVisualIndicatorPersisted": {{BOOLEAN}}
            }
        },
        {
            "header": {
                "namespace": "Alerts",
                "name": "AlertsState"
            },
            "payload": {
                "allAlerts": [
                    {
                        "token": "{{STRING}}",
                        "type": "{{STRING}}",
                        "scheduledTime": "{{STRING}}"
                    }
                ],
                "activeAlerts": [
                    {
                        "token": "{{STRING}}",
                        "type": "{{STRING}}",
                        "scheduledTime": "{{STRING}}"
                    }
                ]
            }
        },
        {
            "header": {
                "namespace": "Speaker",
                "name": "VolumeState"
            },
            "payload": {
                "volume": {{LONG}},
                "muted": {{BOOLEAN}}
            }
        },
        {
            "header": {
                "namespace": "SpeechSynthesizer",
                "name": "SpeechState"
            },
            "payload": {
                "token": "{{STRING}}",
                "offsetInMilliseconds": {{LONG}},
                "playerActivity": "{{STRING}}"
            }
        }
    ],
    "event": {
        "header": {
            "namespace": "SpeechRecognizer",
            "name": "Recognize",
            "messageId": "{{STRING}}",
            "dialogRequestId": "{{STRING}}"
        },
        "payload": {
            "profile": "{{STRING}}",
            "format": "{{STRING}}",
            "initiator": {
                "type": "{{STRING}}",
                "payload": {
                    "wakeWordIndices": {
                        "startIndexInSamples": {{LONG}},
                        "endIndexInSamples": {{LONG}}
                    }
                }
            }
        }
    }
}
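
As a sketch of how a client might assemble this container, the snippet below builds the context array from per-component state just before an event is sent. It is illustrative only: the ContextProvider protocol, the component classes, and the placeholder values are hypothetical names chosen for this example, not part of AVS or any SDK; only the JSON shape of each context object follows the example above.

import json
from typing import Protocol

class ContextProvider(Protocol):
    # Hypothetical interface: each client component reports its own context object.
    def context(self) -> dict: ...

class AudioPlayerComponent:
    def __init__(self):
        self.token = ""
        self.offset_ms = 0
        self.player_activity = "IDLE"

    def context(self) -> dict:
        # Shape matches the AudioPlayer.PlaybackState object shown above.
        return {
            "header": {"namespace": "AudioPlayer", "name": "PlaybackState"},
            "payload": {
                "token": self.token,
                "offsetInMilliseconds": self.offset_ms,
                "playerActivity": self.player_activity,
            },
        }

class SpeakerComponent:
    def __init__(self):
        self.volume = 50
        self.muted = False

    def context(self) -> dict:
        # Shape matches the Speaker.VolumeState object shown above.
        return {
            "header": {"namespace": "Speaker", "name": "VolumeState"},
            "payload": {"volume": self.volume, "muted": self.muted},
        }

def build_context(components: list[ContextProvider]) -> list[dict]:
    # Snapshot every component's state immediately before the event is sent,
    # so Context reflects the client as it was when the user acted.
    return [component.context() for component in components]

# Example: a Recognize event carries the freshly gathered context.
components = [AudioPlayerComponent(), SpeakerComponent()]
message = {
    "context": build_context(components),
    "event": {
        "header": {
            "namespace": "SpeechRecognizer",
            "name": "Recognize",
            "messageId": "msg-1",          # hypothetical placeholder values
            "dialogRequestId": "dialog-1",
        },
        "payload": {
            "profile": "CLOSE_TALK",
            "format": "AUDIO_L16_RATE_16000_CHANNELS_1",
        },
    },
}
print(json.dumps(message, indent=2))

Gathering context at send time, rather than caching it, keeps the reported playerActivity, volume, and other state consistent with what the user actually experienced when the event fired.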

Interfaces with Context Objects

If support for any of the following interfaces is declared with the Capabilities API, state information must be reported to Alexa with each event that requires Context: