Skip to content
This repository has been archived by the owner on Sep 2, 2020. It is now read-only.

Commit

Permalink
Merge pull request #84 from wit-ai/language-fix
Browse files Browse the repository at this point in the history
Language fix
  • Loading branch information
hactar authored Jun 22, 2017
2 parents 78d0596 + 5aadc74 commit 87d9dc1
Show file tree
Hide file tree
Showing 7 changed files with 20 additions and 8 deletions.
6 changes: 4 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ The SDK can capture intents and entities from:

Add the following dependency to your Podfile:
```ruby
pod 'Wit', '~> 4.2.0'
pod 'Wit', '~> 4.2.1'
```

And then run the following command in your project home directory:
Expand Down Expand Up @@ -84,7 +84,9 @@ Sends an NSString to wit.ai for interpretation. Will call delegate methods for e


##### Recording audio
**The audio part of the API currently only supports the legacy GET /message API. If you are using stories (POST /converse), this will not work — use ConverseString instead.**
If you provide a WitSession to WitMicButton.session, the Wit-iOS-SDK will use the /converse endpoint (stories); otherwise the /message endpoint will be used.

Make sure to set Wit's speechRecognitionLocale to the same language as your Wit model. The default value is en-US (American English)

```objc
Starts a new recording session. [self.delegate witDidGraspIntent:] will be called once completed.
Expand Down
4 changes: 2 additions & 2 deletions Wit.podspec
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
Pod::Spec.new do |s|
s.name = "Wit"
s.version = "4.2.0"
s.version = "4.2.1"
s.summary = "Wit.AI Official SDK"
s.description = <<-DESC
Official Wit SDK, https://wit.ai/docs/ios-tutorial/
DESC
s.homepage = "https://github.com/wit-ai/wit-ios-sdk"
s.author = { "Willy Blandin" => "willy@wit.ai" }
s.source = { :git => "https://github.com/wit-ai/wit-ios-sdk.git", :tag => "4.2.0" }
s.source = { :git => "https://github.com/wit-ai/wit-ios-sdk.git", :tag => "4.2.1" }

s.platform = :ios, '7.0'
s.ios.deployment_target = "7.0"
Expand Down
1 change: 0 additions & 1 deletion Wit/WITMicButton.m
Original file line number Diff line number Diff line change
Expand Up @@ -299,7 +299,6 @@ - (void)initialize {
}

// Teardown: unregister everything this button wired up during its lifetime.
- (void)dealloc {
// NOTE(review): -removeObserver:forKeyPath: raises if no matching KVO observation
// of "frame" was ever registered — confirm the corresponding addObserver:forKeyPath:
// call exists (the diff header indicates this line is being deleted in this commit).
[self removeObserver:self forKeyPath:@"frame"];
// Stop receiving all notification-center notifications; safe even if none were added.
[[NSNotificationCenter defaultCenter] removeObserver:self];
// Detach the touch-up-inside action wired to buttonPressed:.
[self removeTarget:self action:@selector(buttonPressed:) forControlEvents:UIControlEventTouchUpInside];
}
Expand Down
2 changes: 1 addition & 1 deletion Wit/WITSFSpeechRecordingSession.h
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,5 @@
#import "WITRecordingSession.h"

// Recording session backed by Apple's SFSpeechRecognizer (iOS 10+).
@interface WITSFSpeechRecordingSession : WITRecordingSession
// Legacy initializer without an explicit locale.
// NOTE(review): presumably falls back to a fixed recognizer locale — verify against the .m.
-(instancetype)initWithWitContext:(NSDictionary *)upContext vadEnabled:(WITVadConfig)vadEnabled withWitToken:(NSString *)witToken customData: (id) customData withDelegate:(id<WITRecordingSessionDelegate>)delegate;
// Preferred initializer: `locale` is an NSLocale identifier (e.g. @"en-US") passed to
// SFSpeechRecognizer for on-device speech recognition; it should match the Wit model's language.
-(instancetype)initWithWitContext:(NSDictionary *)upContext locale: (NSString *) locale vadEnabled:(WITVadConfig)vadEnabled withWitToken:(NSString *)witToken customData: (id) customData withDelegate:(id<WITRecordingSessionDelegate>)delegate;
@end
4 changes: 2 additions & 2 deletions Wit/WITSFSpeechRecordingSession.m
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ @implementation WITSFSpeechRecordingSession {
}


-(instancetype)initWithWitContext:(NSDictionary *)upContext vadEnabled:(WITVadConfig)vadEnabled withWitToken:(NSString *)witToken customData: (id) customData withDelegate:(id<WITRecordingSessionDelegate>)delegate {
-(instancetype)initWithWitContext:(NSDictionary *)upContext locale: (NSString *) locale vadEnabled:(WITVadConfig)vadEnabled withWitToken:(NSString *)witToken customData: (id) customData withDelegate:(id<WITRecordingSessionDelegate>)delegate {
self = [super init];
if (self) {
self.customData = customData;
Expand All @@ -38,7 +38,7 @@ -(instancetype)initWithWitContext:(NSDictionary *)upContext vadEnabled:(WITVadCo
//self.vad = [[WITVad alloc] init];


speechRecognizer = [[SFSpeechRecognizer alloc] initWithLocale:[NSLocale localeWithLocaleIdentifier:@"de-AT"]];
speechRecognizer = [[SFSpeechRecognizer alloc] initWithLocale:[NSLocale localeWithLocaleIdentifier:locale]];
audioEngine = [[AVAudioEngine alloc] init];
average1 = 0.0;
average2 = 0.0;
Expand Down
9 changes: 9 additions & 0 deletions Wit/Wit.h
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,15 @@
*/
@property (nonatomic, copy) NSString *accessToken;

/**
On iOS 10 and above wit-ios-sdk uses Apple's speech recognition. The speech recognition
needs to know which locale to use for recognition. A list of supported locales can be found via:
https://developer.apple.com/documentation/speech/sfspeechrecognizer/1649889-supportedlocales
Note that this locale must match the language of your wit model.
The default value is @"en-US"
*/
@property (nonatomic, copy) NSString *speechRecognitionLocale;

/**
* Configure the voice activity detection algorithm:
* - WITVadConfigDisabled
Expand Down
2 changes: 2 additions & 0 deletions Wit/Wit.m
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ - (void)start {
- (void)start: (id)customData {
if ([SFSpeechRecognizer class]) {
self.recordingSession = [[WITSFSpeechRecordingSession alloc] initWithWitContext:self.state.context
locale: self.speechRecognitionLocale
vadEnabled:[Wit sharedInstance].detectSpeechStop withWitToken:[WITState sharedInstance].accessToken
customData: customData withDelegate:self];
} else {
Expand Down Expand Up @@ -285,6 +286,7 @@ - (void)initialize {
self.detectSpeechStop = WITVadConfigDetectSpeechStop;
self.vadTimeout = 7000;
self.vadSensitivity = 0;
self.speechRecognitionLocale = @"en-US";
}

- (instancetype)init {
Expand Down

0 comments on commit 87d9dc1

Please sign in to comment.