Class: Google::Cloud::Speech::V1p1beta1::Stream

Inherits:
Object
Includes:
MonitorMixin
Defined in:
lib/google/cloud/speech/v1p1beta1/stream.rb

Overview

Stream

A resource that represents the streaming requests and responses.

Examples:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  }
}
stream = speech_client.streaming_recognize(streaming_config)

# Stream 5 seconds of audio from the microphone
# Actual implementation of microphone input varies by platform
5.times do
  stream.send MicrophoneInput.read(32000)
end

stream.stop
stream.wait_until_complete!

results = stream.results
result = results.first.alternatives.first
result.transcript #=> "how old is the Brooklyn Bridge"
result.confidence #=> 0.9826789498329163
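
MicrophoneInput in the example above is a placeholder; as the comments note, actual microphone capture varies by platform. As a rough, hypothetical stand-in, the same loop can be driven from a pre-recorded raw LINEAR16 file (the MicrophoneInput class and the audio.raw path below are assumptions, not part of this gem):

# Hypothetical stand-in for MicrophoneInput: reads 16-bit linear PCM
# from a pre-recorded file instead of a live microphone.
class MicrophoneInput
  FILE = File.open "audio.raw", "rb" # assumed raw LINEAR16 audio

  # Returns up to `bytes` bytes of binary audio data, or an empty
  # binary string once the file is exhausted.
  def self.read bytes
    FILE.read(bytes) || "".b
  end
end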

Instance Method Summary

Instance Method Details

#complete? ⇒ Boolean

Whether all speech recognition results have been returned.

Examples:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  }
}
stream = speech_client.streaming_recognize(streaming_config)

# Stream 5 seconds of audio from the microphone
# Actual implementation of microphone input varies by platform
5.times do
  stream.send MicrophoneInput.read(32000)
end

stream.stop

stream.wait_until_complete!
stream.complete? #=> true

results = stream.results
results.each do |result|
  result.alternatives.each do |alternative|
    puts alternative.transcript
    puts alternative.confidence
  end
end

Returns:

  • (Boolean)

    All speech recognition results have been returned.



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 234

def complete?
  synchronize do
    @complete
  end
end

#on_complete {|callback| ... } ⇒ Object

Register to be notified when the end of the audio stream has been reached.

Examples:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  }
}
stream = speech_client.streaming_recognize(streaming_config)

# register callback for when stream has ended.
stream.on_complete do
  puts "Stream has ended."
end

# Stream 5 seconds of audio from the microphone
# Actual implementation of microphone input varies by platform
5.times do
  stream.send MicrophoneInput.read(32000)
end

stream.stop

Yields:

  • (callback)

    The block to be called when the end of the audio stream has been reached.



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 425

def on_complete &block
  synchronize do
    @callbacks[:complete] << block
  end
end

#on_error {|callback| ... } ⇒ Object

Register to be notified of an error received during the stream.

Examples:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  }
}
stream = speech_client.streaming_recognize(streaming_config)

# register callback for when an error is returned
stream.on_error do |error|
  puts "The following error occurred while streaming: #{error}"
  stream.stop
end

# Stream 5 seconds of audio from the microphone
# Actual implementation of microphone input varies by platform
5.times do
  stream.send MicrophoneInput.read(32000)
end

stream.stop

Yields:

  • (callback)

    The block to be called when an error is received during the stream.

Yield Parameters:

  • error (Exception)

    The error raised.



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 526

def on_error &block
  synchronize do
    @callbacks[:error] << block
  end
end

#on_interim {|callback| ... } ⇒ Object

Register to be notified on the reception of an interim result.

Examples:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  },
  interim_results: true
}
stream = speech_client.streaming_recognize(streaming_config)

# register callback for when an interim result is returned
stream.on_interim do |final_results, interim_results|
  interim_result = interim_results.first
  interim_alternative = interim_result.alternatives.first
  puts interim_alternative.transcript # "how old is the Brooklyn Bridge"
  puts interim_alternative.confidence # 0.9826789498329163
  puts interim_result.stability # 0.8999
end

# Stream 5 seconds of audio from the microphone
# Actual implementation of microphone input varies by platform
5.times do
  stream.send MicrophoneInput.read(32000)
end

stream.stop

Yields:

  • (callback)

    The block for accessing final and interim results.

Yield Parameters:

  • final_results (Array<Result>)

    The final results.

  • interim_results (Array<Result>)

    The interim results.



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 322

def on_interim &block
  synchronize do
    @callbacks[:interim] << block
  end
end

#on_result {|callback| ... } ⇒ Object

Register to be notified on the reception of a final result.

Examples:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  }
}
stream = speech_client.streaming_recognize(streaming_config)

# register callback for when a final result has been received
stream.on_result do |results|
  results.each do |result|
    puts result.alternatives.first.transcript
  end
end

# Stream 5 seconds of audio from the microphone
# Actual implementation of microphone input varies by platform
5.times do
  stream.send MicrophoneInput.read(32000)
end

stream.stop
stream.wait_until_complete!

results = stream.results
result = results.first.alternatives.first
result.transcript #=> "how old is the Brooklyn Bridge"
result.confidence #=> 0.9826789498329163

Yields:

  • (callback)

    The block for accessing final results.

Yield Parameters:

  • results (Array<Result>)

    The final results.



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 377

def on_result &block
  synchronize do
    @callbacks[:result] << block
  end
end

#on_utterance {|callback| ... } ⇒ Object

Register to be notified when the server has detected the end of the user's speech utterance and expects no additional speech. The server will not process additional audio, so the client should stop sending audio data at that point. This event only occurs when single_utterance is set to true in the streaming configuration.

Examples:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  },
  single_utterance: true
}
stream = speech_client.streaming_recognize(streaming_config)

# register callback for when utterance has occurred.
stream.on_utterance do
  puts "Utterance has occurred."
  stream.stop
end

# Stream 5 seconds of audio from the microphone
# Actual implementation of microphone input varies by platform
5.times do
  stream.send MicrophoneInput.read(32000)
end

stream.stop unless stream.stopped?

Yields:

  • (callback)

    The block to be called when the server has detected the end of the user's speech utterance.



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 478

def on_utterance &block
  synchronize do
    @callbacks[:utterance] << block
  end
end

#results ⇒ Array<Result>

The speech recognition results for the audio.

Examples:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  }
}
stream = speech_client.streaming_recognize(streaming_config)

# Stream 5 seconds of audio from the microphone
# Actual implementation of microphone input varies by platform
5.times do
  stream.send MicrophoneInput.read(32000)
end

stream.stop

results = stream.results
results.each do |result|
  result.alternatives.each do |alternative|
    puts alternative.transcript
    puts alternative.confidence
  end
end

Returns:

  • (Array<Result>)

    The transcription results for the recognized audio.



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 191

def results
  synchronize do
    @results
  end
end

#send(bytes) ⇒ Object

Sends audio content to the server.

Examples:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  }
}
stream = speech_client.streaming_recognize(streaming_config)

# Stream 5 seconds of audio from the microphone
# Actual implementation of microphone input varies by platform
5.times do
  stream.send MicrophoneInput.read(32000)
end

stream.stop
stream.wait_until_complete!

results = stream.results
result = results.first.alternatives.first
result.transcript #=> "how old is the Brooklyn Bridge"
result.confidence #=> 0.9826789498329163

Parameters:

  • bytes (String)

    A string of binary audio data to be recognized. The data should be encoded as ASCII-8BIT.
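
As a minimal sketch of supplying data in the expected encoding, note that audio read from a file in binary mode is already ASCII-8BIT (the audio.raw path below is an assumption):

# File.binread opens the file in binary mode, so the returned
# string already has ASCII-8BIT (binary) encoding.
bytes = File.binread "audio.raw"
bytes.encoding #=> #<Encoding:ASCII-8BIT>
stream.send bytes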



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 125

def send bytes
  start # lazily call start if the stream wasn't started yet
  # TODO: do not send if stopped?
  synchronize do
    req = V1p1beta1::StreamingRecognizeRequest.new(
      audio_content: bytes.encode(Encoding::ASCII_8BIT)
    )
    @request_queue.push req
  end
end

#start ⇒ Object

Starts the stream. Calling #start explicitly is optional; the stream is started lazily by the first #send call if it has not been started already.
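
A minimal sketch of starting the stream explicitly rather than relying on the lazy start performed by #send; it assumes the same streaming_config shown in the class overview:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  }
}
stream = speech_client.streaming_recognize(streaming_config)

# Start the stream before sending any audio.
stream.start
stream.started? #=> true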



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 74

def start
  return if @request_queue
  @request_queue = EnumeratorQueue.new(self)
  @request_queue.push @streaming_recognize_request

  Thread.new { background_run }
end

#started? ⇒ boolean

Checks if the stream has been started.

Returns:

  • (boolean)

    true when started, false otherwise.



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 86

def started?
  synchronize do
    !(!@request_queue)
  end
end

#stop ⇒ Object

Stops the stream. Signals to the server that no more data will be sent.
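
A minimal sketch of stopping the stream after sending audio and checking the result with #stopped?; MicrophoneInput is the same platform-specific placeholder used in the other examples:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  }
}
stream = speech_client.streaming_recognize(streaming_config)

# Stream 5 seconds of audio, then signal that no more data will be sent.
5.times do
  stream.send MicrophoneInput.read(32000)
end

stream.stop
stream.stopped? #=> true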



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 139

def stop
  synchronize do
    return if @request_queue.nil?
    @request_queue.push self
    @stopped = true
  end
end

#stopped? ⇒ boolean

Checks if the stream has been stopped.

Returns:

  • (boolean)

    true when stopped, false otherwise.



# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 151

def stopped?
  synchronize do
    @stopped
  end
end

#wait_until_complete! ⇒ Object

Blocks until all speech recognition results have been returned.

Examples:

require "google/cloud/speech"

speech_client = Google::Cloud::Speech.new version: :v1p1beta1
streaming_config = {
  config: {
    encoding: :linear16,
    language_code: "en-US",
    sample_rate_hertz: 16000
  }
}
stream = speech_client.streaming_recognize(streaming_config)

# Stream 5 seconds of audio from the microphone
# Actual implementation of microphone input varies by platform
5.times do
  stream.send MicrophoneInput.read(32000)
end

stream.stop

stream.wait_until_complete!
stream.complete? #=> true

results = stream.results
results.each do |result|
  result.alternatives.each do |alternative|
    puts alternative.transcript
    puts alternative.confidence
  end
end


# File 'lib/google/cloud/speech/v1p1beta1/stream.rb', line 275

def wait_until_complete!
  complete_check = nil
  synchronize { complete_check = @complete }
  while complete_check.nil?
    sleep 1
    synchronize { complete_check = @complete }
  end
end