ARCore Sceneform: Play an .mp4 video when an image is detected

Posted 2019-07-13 17:59

Question:

When an image is detected, I want to place text and a video above it. The text view is placed in the scene, but the video is not; it just gets added to my main layout, in the middle of the screen. I'm using the VideoView component, and I'm not sure whether that is the problem.

override fun onCreate(savedInstanceState: Bundle?) {
    (....)
    arFragment!!.arSceneView.scene.addOnUpdateListener { this.onUpdateFrame(it) }
    arSceneView = arFragment!!.arSceneView
}

private fun onUpdateFrame(frameTime: FrameTime) {
    // arFrame can be null before the session has produced its first frame.
    val frame = arFragment!!.arSceneView.arFrame ?: return

    val augmentedImages = frame.getUpdatedTrackables(AugmentedImage::class.java)

    for (augmentedImage in augmentedImages) {
        if (augmentedImage.trackingState == TrackingState.TRACKING) {

            if (augmentedImage.name.contains("car") && !modelCarAdded) {
                renderView(arFragment!!,
                        augmentedImage.createAnchor(augmentedImage.centerPose))
                modelCarAdded = true
            }
        }
    }

}
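As an aside, getUpdatedTrackables only reports images that were previously registered with the session's AugmentedImageDatabase. A minimal sketch of that setup, assuming the reference photo ships in assets/ as car.jpg (both the asset name and the helper are assumptions, not part of the question's code):

// Register the reference image so ARCore can detect it.
// "car.jpg" is an assumed asset name; "car" must match the
// augmentedImage.name check in onUpdateFrame above.
private fun setupAugmentedImageDatabase(session: Session, config: Config) {
    val bitmap = assets.open("car.jpg").use { BitmapFactory.decodeStream(it) }
    val database = AugmentedImageDatabase(session)
    database.addImage("car", bitmap)
    config.augmentedImageDatabase = database
    session.configure(config)
}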

The text_info layout contains only a TextView; video_youtube is a RelativeLayout with a VideoView inside it.

private fun renderView(fragment: ArFragment, anchor: Anchor) {
    //WORKING
    ViewRenderable.builder()
            .setView(this, R.layout.text_info)
            .build()
            .thenAccept { renderable ->
                (renderable.view as TextView).text = "Example"
                addNodeToScene(fragment, anchor, renderable, Vector3(0f, 0.2f, 0f))

            }
            .exceptionally { throwable ->
                val builder = AlertDialog.Builder(this)
                builder.setMessage(throwable.message)
                        .setTitle("Error!")
                val dialog = builder.create()
                dialog.show()
                null
            }
    //NOT WORKING
    ViewRenderable.builder()
            .setView(this, R.layout.video_youtube)
            .build()
            .thenAccept { renderable ->
                val view = renderable.view
                videoRenderable = renderable
                val path = "android.resource://" + packageName + "/" + R.raw.googlepixel
                view.video_player.setVideoURI(Uri.parse(path))
                renderable.material.setExternalTexture("videoTexture", texture)
                val videoNode = addNodeToScene(fragment, anchor, renderable, Vector3(0.2f, 0.5f, 0f))
                if (!view.video_player.isPlaying) {
                    view.video_player.start()
                    texture
                            .surfaceTexture
                            .setOnFrameAvailableListener {
                                videoNode.renderable = videoRenderable
                                texture.surfaceTexture.setOnFrameAvailableListener(null)
                            }
                } else {
                    videoNode.renderable = videoRenderable
                }

            }
            .exceptionally { throwable ->
                null
            }
}

private fun addNodeToScene(fragment: ArFragment, anchor: Anchor, renderable: Renderable, vector3: Vector3): Node {
        val anchorNode = AnchorNode(anchor)
        val node = TransformableNode(fragment.transformationSystem)
        node.renderable = renderable
        node.setParent(anchorNode)
        node.localPosition = vector3
        fragment.arSceneView.scene.addChild(anchorNode)
        return node
    }

I tried the Chroma Key Video sample, but I don't want the white parts of the video to become transparent. I'm also not sure whether I need a model (.sfb) just to show a video.
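For reference, that sample renders video through a flat plane model (.sfb) rather than a ViewRenderable: a MediaPlayer streams into an ExternalTexture that the plane's material samples. A minimal sketch of that wiring, assuming the sample's chroma_key_video plane model and the googlepixel clip from the code above:

// Sketch of the Chroma Key sample's video wiring; the chroma_key_video
// resource name follows the Sceneform sample and is an assumption here.
val texture = ExternalTexture()
val mediaPlayer = MediaPlayer.create(this, R.raw.googlepixel).apply {
    setSurface(texture.surface)  // frames go to the texture, not a VideoView
    isLooping = true
}
ModelRenderable.builder()
    .setSource(this, R.raw.chroma_key_video)  // flat plane with the custom material
    .build()
    .thenAccept { renderable ->
        renderable.material.setExternalTexture("videoTexture", texture)
        videoRenderable = renderable
    }

This would also explain the symptom above: VideoView is backed by a SurfaceView, whose output bypasses the view-to-texture capture that ViewRenderable relies on, so the video surfaces in the main layout instead of the scene.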

Answer 1:

I used the ChromaKey sample as a starting point.

First, I changed the custom material used for the video, adding a flag that disables the chroma key filtering.

material {
    "name" : "Chroma Key Video Material",
    "defines" : [
        "baseColor"
    ],
    "parameters" : [
        {
           // The texture displaying the frames of the video.
           "type" : "samplerExternal",
           "name" : "videoTexture"
        },
        {
            // The color to filter out of the video.
            "type" : "float4",
            "name" : "keyColor"
        },
        {
            "type" : "bool",
            "name" : "disableChromaKey",
        }
    ],
    "requires" : [
        "position",
        "uv0"
    ],
    "shadingModel" : "unlit",
    // Blending is "masked" instead of "transparent" so that the shadows account for the
    // transparent regions of the video instead of just the shape of the mesh.
    "blending" : "masked",
    // Material is double sided so that the video is visible when walking behind it.
    "doubleSided" : true
}

fragment {
    vec3 desaturate(vec3 color, float amount) {
        // Convert color to grayscale using Luma formula:
        // https://en.wikipedia.org/wiki/Luma_%28video%29
        vec3 gray = vec3(dot(vec3(0.2126, 0.7152, 0.0722), color));

        return vec3(mix(color, gray, amount));
    }

    void material(inout MaterialInputs material) {
        prepareMaterial(material);

        vec2 uv = getUV0();

        if (!gl_FrontFacing) {
          uv.x = 1.0 - uv.x;
        }

        vec4 color = texture(materialParams_videoTexture, uv).rgba;

        if (!materialParams.disableChromaKey) {
            vec3 keyColor = materialParams.keyColor.rgb;

            float threshold = 0.675;
            float slope = 0.2;

            float distance = abs(length(abs(keyColor - color.rgb)));
            float edge0 = threshold * (1.0 - slope);
            float alpha = smoothstep(edge0, threshold, distance);
            color.rgb = desaturate(color.rgb, 1.0 - (alpha * alpha * alpha));

            material.baseColor.a = alpha;
            material.baseColor.rgb = inverseTonemapSRGB(color.rgb);
            material.baseColor.rgb *= material.baseColor.a;
        } else {
            material.baseColor = color;
        }
    }
}

Then set `disableChromaKey` to `true` in the .sfa file:

materials: [
  {
    name: 'DefaultMaterial',
    parameters: [
      {
        videoTexture: {
          external_path: 'MISSING_PATH',
        },
      },
      {
        keyColor: [
          0,
          0,
          0,
          0,
        ],
      },
      {
        disableChromaKey: true,
      },
    ],
    source: 'sampledata/models/chroma_key_video_material.mat',
  },
],
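The flag can also be set (or flipped) at runtime on the loaded renderable through Sceneform's Material API, for example:

// The parameter name must match the material definition above.
renderable.material.setBoolean("disableChromaKey", true)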

Then I placed the video node based on the anchor from a hit test, and placed a ViewRenderable node above it for the text.

private Node createVideoDisplay(final AnchorNode parent, Vector3 localPosition, String title) {
    // Create a node to render the video and add it to the anchor.
    Node videoNode = new Node();
    videoNode.setParent(parent);
    videoNode.setLocalPosition(localPosition);

    // Scale the node so that the aspect ratio of the video is correct.
    float videoWidth = mediaPlayer.getVideoWidth();
    float videoHeight = mediaPlayer.getVideoHeight();
    videoNode.setLocalScale(
            new Vector3(
                    VIDEO_HEIGHT_METERS * (videoWidth / videoHeight),
                    VIDEO_HEIGHT_METERS, 1.0f));

    // Place the text above the video.
    final float videoNodeHeight = VIDEO_HEIGHT_METERS + localPosition.y;
    ViewRenderable.builder()
            .setView(this, R.layout.video_title)
            .build()
            .thenAccept(viewRenderable -> {
                Node titleNode = new Node();
                titleNode.setLocalPosition(new Vector3(0, videoNodeHeight, 0));
                titleNode.setParent(parent);
                titleNode.setRenderable(viewRenderable);
                ((TextView) viewRenderable.getView().findViewById(R.id.video_text))
                        .setText(title);
            });

    return videoNode;
}
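For completeness, wiring this helper to a plane tap could look like the following, written in the question's Kotlin. This is only a sketch: createVideoDisplay, mediaPlayer, videoRenderable, and the title string come from the snippets above, and for brevity the renderable is attached immediately rather than waiting for the first video frame via setOnFrameAvailableListener as the sample does.

arFragment.setOnTapArPlaneListener { hitResult, _, _ ->
    val anchorNode = AnchorNode(hitResult.createAnchor())
    anchorNode.setParent(arFragment.arSceneView.scene)
    // createVideoDisplay scales the node; the material already has the
    // ExternalTexture bound, so attaching the renderable shows the video.
    val videoNode = createVideoDisplay(anchorNode, Vector3.zero(), "Example")
    videoNode.renderable = videoRenderable
    if (!mediaPlayer.isPlaying) mediaPlayer.start()
}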