国产 无码 综合区,色欲AV无码国产永久播放,无码天堂亚洲国产AV,国产日韩欧美女同一区二区

Realitykit結合Speech實現語音控制AR機器人移動(完整代碼)

這篇具有很好參考價值的文章主要介紹了Realitykit結合Speech實現語音控制AR機器人移動(完整代碼)。希望對大家有所幫助。如果存在錯誤或未考慮完全的地方,請大家不吝賜教,您也可以點擊"舉報違法"按鈕提交疑問。

利用Apple最新的Realitykit搭配ARkit實現虛擬物體的放置,結合內置的Speech庫實現語音的識別功能,將語音內容轉為文本內容,從而讓機器進行運動。

大體思路:

1、配置并啟動ARkit環境。

2、構建Entity實體。可以用Apple官方的CreatingAPhotogrammetryCommandLineApp的代碼文檔來生成.usdz文件,從而建造自己想要的實體。

3、放置實體到現實環境中。通過raycast發射射線,通過轉化獲得現實世界中的x,y,z的坐標,從而把實體放在現實世界中。

4、實現機器人的運動。通過傳入文本信息,利用實體的transform屬性(Transform)來進行移動。

5、通過SFSpeechRecognizer獲得語音識別允許。

6、創造音頻節點,將輸入語音的設備設置為麥克風,將音頻結果初始化,放到SFSpeechAudioBufferRecognitionRequest里面。

7、進行音頻結果的處理,轉化為文本,放到機器人運動中。

8、機器人完成運動

沒介紹的其他代碼就是為實體添加屬性,讓機器人能夠移動,放縮,旋轉。

還有一些手勢的識別:長按添加新的實體,滑動讓實體消失。

import UIKit
import RealityKit
import ARKit
import Speech
/// AR demo: places a voice-controlled robot model into the world.
///
/// - Tap: raycast onto a horizontal plane and place the main robot there.
/// - Swipe: remove the entity (and its anchor) under the touch.
/// - Long-press: add a second robot at the pressed location.
/// - Speech: recognized words ("forward"/"back"/"left"/"right") drive the robot.
class ViewController: UIViewController {

    @IBOutlet var arView: ARView!

    /// Main robot model. Typed `ModelEntity` (what `Entity.loadModel` returns)
    /// so it satisfies `HasCollision` for `installGestures` without a forced cast.
    var entity: ModelEntity?
    /// Scratch transform reused as the move target in `rebotMove`.
    var moveToLocation: Transform = Transform()
    /// Duration (seconds) of each move/walk animation.
    let moveTime: Double = 5

    // MARK: - Speech recognition
    let speechRecognizer: SFSpeechRecognizer? = SFSpeechRecognizer()
    let speechRequest = SFSpeechAudioBufferRecognitionRequest()
    /// Obtained from `recognitionTask(with:resultHandler:)`.
    /// `SFSpeechRecognitionTask` must never be instantiated directly.
    var speechTask: SFSpeechRecognitionTask?

    // MARK: - Audio
    /// Routes microphone buffers into the speech request.
    let audioEngine = AVAudioEngine()
    /// Use the app-wide shared session, not a freshly created instance.
    let audioSession = AVAudioSession.sharedInstance()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Load the bundled robot; log instead of crashing if the asset is missing.
        if let robot = try? Entity.loadModel(named: "toy_robot_vintage.usdz") {
            // Collision shapes are required for gestures and entity hit-testing.
            robot.generateCollisionShapes(recursive: true)
            arView.installGestures([.rotation, .scale, .translation], for: robot)
            entity = robot
        } else {
            print("Failed to load toy_robot_vintage.usdz")
        }
        startARSession()
        // Tap = place, swipe = delete, long-press = add a second robot.
        arView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(handleTapLocation)))
        arView.addGestureRecognizer(UISwipeGestureRecognizer(target: self, action: #selector(handleSwipeLocation)))
        arView.addGestureRecognizer(UILongPressGestureRecognizer(target: self, action: #selector(handleLongPressLocation)))
        startSpeechRecognition()
    }

    /// Places the main robot on the horizontal plane hit by the tap.
    @objc func handleTapLocation(_ recognizer: UITapGestureRecognizer) {
        let tapLocation = recognizer.location(in: arView)
        // Raycast the 2D touch into the 3D scene.
        let results = arView.raycast(from: tapLocation, allowing: .estimatedPlane, alignment: .horizontal)
        guard let firstResult = results.first, let entity = entity else { return }
        // Column 3 of the hit's world transform is the world-space position.
        let worldPosition = simd_make_float3(firstResult.worldTransform.columns.3)
        placeModelInWorld(object: entity, position: worldPosition)
    }

    /// Removes the entity under the swipe (detaches its whole anchor).
    @objc func handleSwipeLocation(_ recognizer: UISwipeGestureRecognizer) {
        let swipeLocation = recognizer.location(in: arView)
        if let hitEntity = arView.entity(at: swipeLocation) {
            hitEntity.anchor?.removeFromParent()
        }
    }

    /// Adds a second robot at the long-pressed location, if no entity is there.
    @objc func handleLongPressLocation(_ recognizer: UILongPressGestureRecognizer) {
        // A long-press recognizer fires for every state change; act only once,
        // when the press is first recognized, to avoid duplicate entities.
        guard recognizer.state == .began else { return }
        let pressLocation = recognizer.location(in: arView)
        let results = arView.raycast(from: pressLocation, allowing: .estimatedPlane, alignment: .horizontal)
        guard let firstResult = results.first,
              arView.entity(at: pressLocation) == nil else { return }
        let worldPosition = simd_make_float3(firstResult.worldTransform.columns.3)
        guard let newRobot = try? Entity.loadModel(named: "toy_robot_vintageOne.usdz") else {
            print("Failed to load toy_robot_vintageOne.usdz")
            return
        }
        newRobot.generateCollisionShapes(recursive: true)
        arView.installGestures([.translation, .rotation, .scale], for: newRobot)
        let objectAnchor = AnchorEntity(world: worldPosition)
        objectAnchor.addChild(newRobot)
        arView.scene.addAnchor(objectAnchor)
    }

    /// Runs world tracking with horizontal plane detection.
    func startARSession() {
        arView.automaticallyConfigureSession = true
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = [.horizontal]
        configuration.environmentTexturing = .automatic
//        arView.debugOptions = .showAnchorGeometry
        arView.session.run(configuration)
    }

    /// Anchors `object` at a world-space position.
    func placeModelInWorld(object: Entity, position: SIMD3<Float>) {
        let objectAnchor = AnchorEntity(world: position)
        objectAnchor.addChild(object)
        arView.scene.addAnchor(objectAnchor)
    }

    /// Moves/rotates the robot according to a recognized command word.
    /// Unrecognized words are ignored with a log message.
    func rebotMove(direction: String) {
        guard let entity = entity else { return }
        switch direction {
        case "forward":
            // NOTE(review): RealityKit units are metres; 20 is very far — confirm intended.
            moveToLocation.translation = entity.transform.translation + simd_float3(x: 0, y: 0, z: 20)
            entity.move(to: moveToLocation, relativeTo: entity, duration: moveTime)
            walkAnimation(movementDuration: moveTime)
            print("moveForward")
        case "back":
            moveToLocation.translation = entity.transform.translation + simd_float3(x: 0, y: 0, z: -20)
            entity.move(to: moveToLocation, relativeTo: entity, duration: moveTime)
            walkAnimation(movementDuration: moveTime)
        case "left":
            // Rotate 90° around the robot's own Y (up) axis.
            let rotateToAngle = simd_quatf(angle: GLKMathDegreesToRadians(90), axis: SIMD3(x: 0, y: 1, z: 0))
            entity.setOrientation(rotateToAngle, relativeTo: entity)
        case "right":
            let rotateToAngle = simd_quatf(angle: GLKMathDegreesToRadians(-90), axis: SIMD3(x: 0, y: 1, z: 0))
            entity.setOrientation(rotateToAngle, relativeTo: entity)
        default:
            print("沒有移動指令")
        }
    }

    /// Plays the model's first bundled animation (its walk cycle, if any),
    /// repeated for the duration of the movement.
    func walkAnimation(movementDuration: Double) {
        guard let entity = entity else { return }
        if let rebotAnimation = entity.availableAnimations.first {
            entity.playAnimation(rebotAnimation.repeat(duration: movementDuration), transitionDuration: 0.5, startsPaused: false)
            print("Yes")
        } else {
            print("沒有相關動畫")
        }
    }

    /// Permission → microphone capture → recognition task.
    func startSpeechRecognition() {
        requestPermission()
        startAudioRecoding()
        speechRecognize()
    }

    /// Asks the user for speech-recognition authorization and logs the outcome.
    func requestPermission() {
        SFSpeechRecognizer.requestAuthorization { authorizationStatus in
            switch authorizationStatus {
            case .authorized:
                print("允許")
            case .denied:
                print("拒絕")
            case .notDetermined:
                print("等待您的決定")
            case .restricted:
                print("無法啟用")
            @unknown default:
                print("未知狀態")
            }
        }
    }

    /// Taps the microphone input node and streams its buffers into the
    /// speech request, then configures and starts the audio session/engine.
    func startAudioRecoding() {
        let inputNode = audioEngine.inputNode
        let recordingFormat = inputNode.outputFormat(forBus: 0)
        // The tap closure escapes and is retained by the engine — capture weakly.
        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { [weak self] buffer, _ in
            self?.speechRequest.append(buffer)
        }
        do {
            // Record-only session, measurement mode, ducking other audio.
            try audioSession.setCategory(.record, mode: .measurement, options: .duckOthers)
            try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
            audioEngine.prepare()
            try audioEngine.start()
        } catch {
            // Previously swallowed silently; surface the failure.
            print("Audio session/engine failed to start: \(error)")
        }
    }

    /// Starts the recognition task and forwards the latest recognized word
    /// to `rebotMove`. Results are throttled: only the first callback of
    /// every cycle of three is acted upon.
    func speechRecognize() {
        // Use the stored recognizer rather than creating a second instance.
        guard let speechRecognizer = speechRecognizer else {
            print("語音識別不可用")
            return
        }
        if !speechRecognizer.isAvailable {
            print("無法正常工作")
        }
        var count = 0
        speechTask = speechRecognizer.recognitionTask(with: speechRequest) { [weak self] result, error in
            count += 1
            if count == 1 {
                guard let self = self,
                      let lastSegment = result?.bestTranscription.segments.last else { return }
                self.rebotMove(direction: lastSegment.substring)
                print(lastSegment.substring)
            } else if count >= 3 {
                count = 0
            }
        }
    }
}

注釋已經(jīng)放在代碼中了,只需要導(dǎo)入自己的.usdz文件就可以運(yùn)行成功了。文章來(lái)源地址http://www.zghlxwxcb.cn/news/detail-421137.html

到了這里,關(guān)于Realitykit結(jié)合Speech實(shí)現(xiàn)語(yǔ)音控制AR機(jī)器人移動(dòng)(完整代碼)的文章就介紹完了。如果您還想了解更多內(nèi)容,請(qǐng)?jiān)谟疑辖撬阉鱐OY模板網(wǎng)以前的文章或繼續(xù)瀏覽下面的相關(guān)文章,希望大家以后多多支持TOY模板網(wǎng)!

本文來(lái)自互聯(lián)網(wǎng)用戶投稿,該文觀點(diǎn)僅代表作者本人,不代表本站立場(chǎng)。本站僅提供信息存儲(chǔ)空間服務(wù),不擁有所有權(quán),不承擔(dān)相關(guān)法律責(zé)任。如若轉(zhuǎn)載,請(qǐng)注明出處: 如若內(nèi)容造成侵權(quán)/違法違規(guī)/事實(shí)不符,請(qǐng)點(diǎn)擊違法舉報(bào)進(jìn)行投訴反饋,一經(jīng)查實(shí),立即刪除!

領(lǐng)支付寶紅包贊助服務(wù)器費(fèi)用

相關(guān)文章

覺(jué)得文章有用就打賞一下文章作者

支付寶掃一掃打賞

博客贊助

微信掃一掃打賞

請(qǐng)作者喝杯咖啡吧~博客贊助

支付寶掃一掃領(lǐng)取紅包,優(yōu)惠每天領(lǐng)

二維碼1

領(lǐng)取紅包

二維碼2

領(lǐng)紅包