【Filament】Texture Mapping

1 Introduction

        This article describes how to implement texture mapping with Filament. Readers who are not yet familiar with Filament may want to review the following articles first.

  • Setting up the Filament Environment
  • Drawing a Triangle
  • Drawing a Rectangle
  • Drawing a Circle
  • Drawing a Cube

        In Filament's texture coordinate system, the positive x axis points right and the positive y axis points up; the direction of the y axis is therefore the opposite of OpenGL ES and libGDX (see 【OpenGL ES】Texture Mapping and 【libGDX】Mesh Texture Mapping).
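
        For example, UV data originally authored for OpenGL ES can be reused by flipping the v component before handing it to Filament. The following is a minimal sketch; the helper name flipV is our own illustration and is not part of Filament.

// Hypothetical helper (illustration only): convert OpenGL-ES-style UVs to
// Filament-style UVs by flipping the v component of each (u, v) pair.
static float[] flipV(float[] uv) { // uv = {u0, v0, u1, v1, ...}
    float[] out = uv.clone();
    for (int i = 1; i < out.length; i += 2) {
        out[i] = 1f - out[i];
    }
    return out;
}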

2 Texture Mapping

        The project structure of this article is as follows; complete code and resources → Filament Texture Mapping.

2.1 Custom Base Classes

        To let readers focus on Filament's inputs without wrestling with the fairly involved environment setup, the author follows the OpenGL ES pattern and extracts two classes: FLSurfaceView and BaseModel. FLSurfaceView plays a role similar to GLSurfaceView and hosts the rendering-environment configuration; BaseModel provides utilities around VertexBuffer, IndexBuffer, Material and Renderable that subclasses can use directly.

        build.gradle

...
android {
    ...
    aaptOptions { // Files that are not compressed when the app is packaged
        noCompress 'filamat', 'ktx'
    }
}

dependencies {
    implementation fileTree(dir: '../libs', include: ['*.aar'])
    ...
}

        Note: the aar files produced by the build in Setting up the Filament Environment need to be placed in the libs directory under the project root.

        FLSurfaceView.java

package com.zhyan8.texture.filament;

import android.content.Context;
import android.graphics.Point;
import android.view.Choreographer;
import android.view.Surface;
import android.view.SurfaceView;

import com.google.android.filament.Camera;
import com.google.android.filament.Engine;
import com.google.android.filament.EntityManager;
import com.google.android.filament.Filament;
import com.google.android.filament.Renderer;
import com.google.android.filament.Scene;
import com.google.android.filament.Skybox;
import com.google.android.filament.SwapChain;
import com.google.android.filament.View;
import com.google.android.filament.Viewport;
import com.google.android.filament.android.DisplayHelper;
import com.google.android.filament.android.FilamentHelper;
import com.google.android.filament.android.UiHelper;

import java.util.ArrayList;

/**
 * SurfaceView rendered by Filament.
 * Comparable to GLSurfaceView in OpenGL ES; it creates the Filament rendering environment.
 */
public class FLSurfaceView extends SurfaceView {
    public static int RENDERMODE_WHEN_DIRTY = 0; // Render a frame only when the user requests it
    public static int RENDERMODE_CONTINUOUSLY = 1; // Render continuously

    protected int mRenderMode = RENDERMODE_CONTINUOUSLY; // Render mode
    protected Choreographer mChoreographer; // Frame scheduling
    protected DisplayHelper mDisplayHelper; // Manages the Display (listens for resolution or refresh-rate changes)
    protected UiHelper mUiHelper; // Manages SurfaceView, TextureView, SurfaceHolder
    protected Engine mEngine; // Engine (tracks user-created resources, manages the render thread and hardware renderer)
    protected Renderer mRenderer; // Renderer (bound to an OS window, generates draw commands, manages frame latency)
    protected Scene mScene; // Scene (manages renderables and lights)
    protected View mView; // Holds the render state (a View is what the Renderer operates on)
    protected Camera mCamera; // Camera (view management)
    protected Point mDesiredSize; // Render resolution
    protected float[] mSkyboxColor; // Background color
    protected SwapChain mSwapChain; // The OS's native renderable surface (usually a window or view)
    protected FrameCallback mFrameCallback = new FrameCallback(); // Frame callback
    protected ArrayList<RenderCallback> mRenderCallbacks; // Callbacks invoked before each frame (typically for model/camera transforms)

    static {
        Filament.init();
    }

    public FLSurfaceView(Context context) {
        super(context);
        mChoreographer = Choreographer.getInstance();
        mDisplayHelper = new DisplayHelper(context);
        mRenderCallbacks = new ArrayList<>();
    }

    public void init() { // Initialization
        setupSurfaceView();
        setupFilament();
        setupView();
        setupScene();
    }

    public void setRenderMode(int renderMode) { // Set the render mode
        mRenderMode = renderMode;
    }

    public void addRenderCallback(RenderCallback renderCallback) { // Add a render callback
        if (renderCallback != null) {
            mRenderCallbacks.add(renderCallback);
        }
    }

    public void requestRender() { // Request a render
        mChoreographer.postFrameCallback(mFrameCallback);
    }

    public void onResume() { // Resume
        mChoreographer.postFrameCallback(mFrameCallback);
    }

    public void onPause() { // Pause
        mChoreographer.removeFrameCallback(mFrameCallback);
    }

    public void onDestroy() { // Destroy the Filament environment
        mChoreographer.removeFrameCallback(mFrameCallback);
        mRenderCallbacks.clear();
        mUiHelper.detach();
        mEngine.destroyRenderer(mRenderer);
        mEngine.destroyView(mView);
        mEngine.destroyScene(mScene);
        mEngine.destroyCameraComponent(mCamera.getEntity());
        EntityManager entityManager = EntityManager.get();
        entityManager.destroy(mCamera.getEntity());
        mEngine.destroy();
    }

    protected void setupScene() { // Configure the Scene
    }

    protected void onResized(int width, int height) { // Called when the Surface size changes
        double zoom = 1;
        double aspect = (double) width / (double) height;
        mCamera.setProjection(Camera.Projection.ORTHO,
                -aspect * zoom, aspect * zoom, -zoom, zoom, 0, 1000);
    }

    private void setupSurfaceView() { // Configure the SurfaceView
        mUiHelper = new UiHelper(UiHelper.ContextErrorPolicy.DONT_CHECK);
        mUiHelper.setRenderCallback(new SurfaceCallback());
        if (mDesiredSize != null) {
            mUiHelper.setDesiredSize(mDesiredSize.x, mDesiredSize.y);
        }
        mUiHelper.attachTo(this);
    }

    private void setupFilament() { // Configure Filament
        mEngine = Engine.create();
        // mEngine = (new Engine.Builder()).featureLevel(Engine.FeatureLevel.FEATURE_LEVEL_0).build();
        mRenderer = mEngine.createRenderer();
        mScene = mEngine.createScene();
        mView = mEngine.createView();
        mCamera = mEngine.createCamera(mEngine.getEntityManager().create());
    }

    private void setupView() { // Configure the View
        float[] color = mSkyboxColor != null ? mSkyboxColor : new float[] {0, 0, 0, 1};
        Skybox skybox = (new Skybox.Builder()).color(color).build(mEngine);
        mScene.setSkybox(skybox);
        if (mEngine.getActiveFeatureLevel() == Engine.FeatureLevel.FEATURE_LEVEL_0) {
            mView.setPostProcessingEnabled(false); // FEATURE_LEVEL_0 does not support post-processing
        }
        mView.setCamera(mCamera);
        mView.setScene(mScene);
    }

    /**
     * Frame callback.
     */
    private class FrameCallback implements Choreographer.FrameCallback {
        @Override
        public void doFrame(long frameTimeNanos) { // Render one frame
            if (mRenderMode == RENDERMODE_CONTINUOUSLY) {
                mChoreographer.postFrameCallback(this); // Request the next frame
            }
            mRenderCallbacks.forEach(callback -> callback.onCall());
            if (mUiHelper.isReadyToRender()) {
                if (mRenderer.beginFrame(mSwapChain, frameTimeNanos)) {
                    mRenderer.render(mView);
                    mRenderer.endFrame();
                }
            }
        }
    }

    /**
     * Surface callbacks.
     */
    private class SurfaceCallback implements UiHelper.RendererCallback {
        @Override
        public void onNativeWindowChanged(Surface surface) { // Called when the native window changes
            if (mSwapChain != null) {
                mEngine.destroySwapChain(mSwapChain);
            }
            long flags = mUiHelper.getSwapChainFlags();
            if (mEngine.getActiveFeatureLevel() == Engine.FeatureLevel.FEATURE_LEVEL_0) {
                if (SwapChain.isSRGBSwapChainSupported(mEngine)) {
                    flags = flags | SwapChain.CONFIG_SRGB_COLORSPACE;
                }
            }
            mSwapChain = mEngine.createSwapChain(surface, flags);
            mDisplayHelper.attach(mRenderer, getDisplay());
        }

        @Override
        public void onDetachedFromSurface() { // Called when detached from the Surface
            mDisplayHelper.detach();
            if (mSwapChain != null) {
                mEngine.destroySwapChain(mSwapChain);
                mEngine.flushAndWait();
                mSwapChain = null;
            }
        }

        @Override
        public void onResized(int width, int height) { // Called when the Surface size changes
            mView.setViewport(new Viewport(0, 0, width, height));
            FilamentHelper.synchronizePendingFrames(mEngine);
            FLSurfaceView.this.onResized(width, height);
        }
    }

    /**
     * Callback invoked before each frame is rendered.
     * Typically used for model transforms, camera transforms, etc.
     */
    public static interface RenderCallback {
        void onCall();
    }
}
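
        As a usage sketch (our own illustration, not part of this article's project), a caller could switch FLSurfaceView to on-demand rendering and register a per-frame callback as follows; context is assumed to be an Activity or another valid Context.

// Minimal usage sketch of the FLSurfaceView API defined above.
FLSurfaceView surfaceView = new FLSurfaceView(context);
surfaceView.setRenderMode(FLSurfaceView.RENDERMODE_WHEN_DIRTY); // render only when requested
surfaceView.addRenderCallback(() -> {
    // Runs before every frame; typically used for model or camera transforms.
});
surfaceView.init();
surfaceView.requestRender(); // schedule a single frame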

        BaseModel.java

package com.zhyan8.texture.filament;

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;

import com.google.android.filament.Box;
import com.google.android.filament.Engine;
import com.google.android.filament.EntityManager;
import com.google.android.filament.IndexBuffer;
import com.google.android.filament.Material;
import com.google.android.filament.MaterialInstance;
import com.google.android.filament.RenderableManager;
import com.google.android.filament.RenderableManager.PrimitiveType;
import com.google.android.filament.Texture;
import com.google.android.filament.TransformManager;
import com.google.android.filament.VertexBuffer;
import com.google.android.filament.VertexBuffer.AttributeType;
import com.google.android.filament.VertexBuffer.VertexAttribute;
import com.google.android.filament.android.TextureHelper;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;

/**
 * Base model class.
 * Manages the model's material, vertex attributes, vertex indices and renderable id.
 */
public class BaseModel {
    private static String TAG = "BaseModel";

    protected Context mContext; // Context
    protected Engine mEngine; // Filament engine
    protected TransformManager mTransformManager; // Transform manager
    protected Material mMaterial; // Model material
    protected MaterialInstance mMaterialInstance; // Material instance
    protected VertexBuffer mVertexBuffer; // Vertex attribute buffer
    protected IndexBuffer mIndexBuffer; // Vertex index buffer
    protected int mRenderable; // Renderable id
    protected int mTransformComponent; // Transform component id
    protected Box mBox; // Bounding box
    protected FLSurfaceView.RenderCallback mRenderCallback; // Callback before each frame (typically for model/camera transforms)

    public BaseModel(Context context, Engine engine) {
        mContext = context;
        mEngine = engine;
        mTransformManager = mEngine.getTransformManager();
    }

    public Material getMaterial() { // Get the material
        return mMaterial;
    }

    public VertexBuffer getVertexBuffer() { // Get the vertex attribute buffer
        return mVertexBuffer;
    }

    public IndexBuffer getIndexBuffer() { // Get the vertex index buffer
        return mIndexBuffer;
    }

    public int getRenderable() { // Get the renderable id
        return mRenderable;
    }

    public FLSurfaceView.RenderCallback getRenderCallback() { // Get the render callback
        return mRenderCallback;
    }

    public void destroy() { // Destroy the model
        mEngine.destroyEntity(mRenderable);
        mEngine.destroyVertexBuffer(mVertexBuffer);
        mEngine.destroyIndexBuffer(mIndexBuffer);
        mEngine.destroyMaterialInstance(mMaterialInstance);
        mEngine.destroyMaterial(mMaterial);
        EntityManager entityManager = EntityManager.get();
        entityManager.destroy(mRenderable);
    }

    protected Material loadMaterial(String materialPath) { // Load a material
        Buffer buffer = readUncompressedAsset(materialPath);
        if (buffer != null) {
            Material material = (new Material.Builder()).payload(buffer, buffer.remaining()).build(mEngine);
            mMaterialInstance = material.createInstance();
            material.compile(
                    Material.CompilerPriorityQueue.HIGH,
                    Material.UserVariantFilterBit.ALL,
                    new Handler(Looper.getMainLooper()),
                    () -> Log.i(TAG, "Material " + material.getName() + " compiled."));
            mEngine.flush();
            return material;
        }
        return null;
    }

    protected VertexBuffer getVertexBuffer(float[] values) { // Build the vertex attribute buffer (position only)
        ByteBuffer vertexData = getByteBuffer(values);
        int vertexCount = values.length / 3;
        int vertexSize = Float.BYTES * 3;
        VertexBuffer vertexBuffer = new VertexBuffer.Builder()
                .bufferCount(1)
                .vertexCount(vertexCount)
                .attribute(VertexAttribute.POSITION, 0, AttributeType.FLOAT3, 0, vertexSize)
                .build(mEngine);
        vertexBuffer.setBufferAt(mEngine, 0, vertexData);
        return vertexBuffer;
    }

    protected VertexBuffer getVertexBuffer(VertexPosCol[] values) { // Build the vertex attribute buffer (position + color)
        ByteBuffer vertexData = getByteBuffer(values);
        int vertexCount = values.length;
        int vertexSize = VertexPosCol.BYTES;
        VertexBuffer vertexBuffer = new VertexBuffer.Builder()
                .bufferCount(1)
                .vertexCount(vertexCount)
                .attribute(VertexAttribute.POSITION, 0, AttributeType.FLOAT3, 0, vertexSize)
                .attribute(VertexAttribute.COLOR, 0, AttributeType.UBYTE4, 3 * Float.BYTES, vertexSize)
                .normalized(VertexAttribute.COLOR)
                .build(mEngine);
        vertexBuffer.setBufferAt(mEngine, 0, vertexData);
        return vertexBuffer;
    }

    protected VertexBuffer getVertexBuffer(VertexPosUV[] values) { // Build the vertex attribute buffer (position + UV)
        ByteBuffer vertexData = getByteBuffer(values);
        int vertexCount = values.length;
        int vertexSize = VertexPosUV.BYTES;
        VertexBuffer vertexBuffer = new VertexBuffer.Builder()
                .bufferCount(1)
                .vertexCount(vertexCount)
                .attribute(VertexAttribute.POSITION, 0, AttributeType.FLOAT3, 0, vertexSize)
                .attribute(VertexAttribute.UV0, 0, AttributeType.FLOAT2, 3 * Float.BYTES, vertexSize)
                .build(mEngine);
        vertexBuffer.setBufferAt(mEngine, 0, vertexData);
        return vertexBuffer;
    }

    protected IndexBuffer getIndexBuffer(short[] values) { // Build the vertex index buffer
        ByteBuffer indexData = getByteBuffer(values);
        int indexCount = values.length;
        IndexBuffer indexBuffer = new IndexBuffer.Builder()
                .indexCount(indexCount)
                .bufferType(IndexBuffer.Builder.IndexType.USHORT)
                .build(mEngine);
        indexBuffer.setBuffer(mEngine, indexData);
        return indexBuffer;
    }

    protected int getRenderable(PrimitiveType primitiveType, int vertexCount) { // Build the renderable
        int renderable = EntityManager.get().create();
        new RenderableManager.Builder(1)
                .boundingBox(mBox)
                .geometry(0, primitiveType, mVertexBuffer, mIndexBuffer, 0, vertexCount)
                .material(0, mMaterialInstance)
                .build(mEngine, renderable);
        return renderable;
    }

    protected Texture getTexture(String texturePath) { // Get a Texture from assets
        Bitmap bitmap = loadBitmapFromAsset(texturePath);
        if (bitmap != null) {
            return generateTexture(bitmap);
        }
        return null;
    }

    protected Texture getTexture(int resourceId) { // Get a Texture from drawable resources
        Bitmap bitmap = loadBitmapFromDrawable(resourceId);
        if (bitmap != null) {
            return generateTexture(bitmap);
        }
        return null;
    }

    private Texture generateTexture(Bitmap bitmap) { // Generate a Texture
        Texture texture = new Texture.Builder()
                .width(bitmap.getWidth())
                .height(bitmap.getHeight())
                .sampler(Texture.Sampler.SAMPLER_2D)
                .format(Texture.InternalFormat.SRGB8_A8)
                .levels(0xff)
                .build(mEngine);
        TextureHelper.setBitmap(mEngine, texture, 0, bitmap, new Handler(), () ->
                Log.i(TAG, "getTexture, Bitmap is released."));
        texture.generateMipmaps(mEngine);
        return texture;
    }

    private Buffer readUncompressedAsset(String assetPath) { // Load an asset
        ReadableByteChannel src = null;
        FileInputStream fis = null;
        ByteBuffer dist = null;
        try {
            AssetFileDescriptor fd = mContext.getAssets().openFd(assetPath);
            fis = fd.createInputStream();
            dist = ByteBuffer.allocate((int) fd.getLength());
            src = Channels.newChannel(fis);
            src.read(dist);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (src != null) {
                try {
                    src.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if (fis != null) {
                try {
                    fis.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        if (dist != null) {
            return dist.rewind();
        }
        return null;
    }

    private Bitmap loadBitmapFromAsset(String assetPath) { // Load a bitmap from assets
        InputStream inputStream = null;
        Bitmap bitmap = null;
        try {
            inputStream = mContext.getAssets().open(assetPath);
            bitmap = BitmapFactory.decodeStream(inputStream);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (inputStream != null) {
                try {
                    inputStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return bitmap;
    }

    private Bitmap loadBitmapFromDrawable(int resourceId) { // Load a bitmap from drawable resources
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inPremultiplied = true;
        Bitmap bitmap = BitmapFactory.decodeResource(mContext.getResources(), resourceId, options);
        return bitmap;
    }

    private ByteBuffer getByteBuffer(float[] values) { // Convert a float array to a ByteBuffer
        ByteBuffer byteBuffer = ByteBuffer.allocate(values.length * Float.BYTES);
        byteBuffer.order(ByteOrder.nativeOrder());
        for (int i = 0; i < values.length; i++) {
            byteBuffer.putFloat(values[i]);
        }
        byteBuffer.flip();
        return byteBuffer;
    }

    private ByteBuffer getByteBuffer(short[] values) { // Convert a short array to a ByteBuffer
        ByteBuffer byteBuffer = ByteBuffer.allocate(values.length * Short.BYTES);
        byteBuffer.order(ByteOrder.nativeOrder());
        for (int i = 0; i < values.length; i++) {
            byteBuffer.putShort(values[i]);
        }
        byteBuffer.flip();
        return byteBuffer;
    }

    private ByteBuffer getByteBuffer(VertexPosCol[] values) { // Convert a VertexPosCol array to a ByteBuffer
        ByteBuffer byteBuffer = ByteBuffer.allocate(values.length * VertexPosCol.BYTES);
        byteBuffer.order(ByteOrder.nativeOrder());
        for (int i = 0; i < values.length; i++) {
            values[i].put(byteBuffer);
        }
        byteBuffer.flip();
        return byteBuffer;
    }

    private ByteBuffer getByteBuffer(VertexPosUV[] values) { // Convert a VertexPosUV array to a ByteBuffer
        ByteBuffer byteBuffer = ByteBuffer.allocate(values.length * VertexPosUV.BYTES);
        byteBuffer.order(ByteOrder.nativeOrder());
        for (int i = 0; i < values.length; i++) {
            values[i].put(byteBuffer);
        }
        byteBuffer.flip();
        return byteBuffer;
    }

    /**
     * Vertex data (position + color).
     */
    public static class VertexPosCol {
        public static int BYTES = 16;
        public float x;
        public float y;
        public float z;
        public int color;

        public VertexPosCol() {
        }

        public VertexPosCol(float x, float y, float z, int color) {
            this.x = x;
            this.y = y;
            this.z = z;
            this.color = color;
        }

        public ByteBuffer put(ByteBuffer buffer) { // Write this VertexPosCol into a ByteBuffer
            buffer.putFloat(x);
            buffer.putFloat(y);
            buffer.putFloat(z);
            buffer.putInt(color);
            return buffer;
        }
    }

    /**
     * Vertex data (position + texture coordinates).
     */
    public static class VertexPosUV {
        public static int BYTES = 20;
        public float x;
        public float y;
        public float z;
        public float u;
        public float v;

        public VertexPosUV() {
        }

        public VertexPosUV(float x, float y, float z, float u, float v) {
            this.x = x;
            this.y = y;
            this.z = z;
            this.u = u;
            this.v = v;
        }

        public ByteBuffer put(ByteBuffer buffer) { // Write this VertexPosUV into a ByteBuffer
            buffer.putFloat(x);
            buffer.putFloat(y);
            buffer.putFloat(z);
            buffer.putFloat(u);
            buffer.putFloat(v);
            return buffer;
        }
    }
}
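
        Besides the asset-path overload used later in this article, BaseModel also exposes getTexture(int resourceId) for images stored under res/drawable. A minimal sketch of how a subclass might use it; R.drawable.texture_1 is a hypothetical resource name used only for illustration.

// Inside a BaseModel subclass, after loadMaterial(...) has created mMaterialInstance.
// R.drawable.texture_1 is a hypothetical drawable resource.
Texture texture = getTexture(R.drawable.texture_1);
if (texture != null) {
    mMaterialInstance.setParameter("mainTex", texture, new TextureSampler());
}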

2.2 Texture Mapping Implementation

        MainActivity.java

package com.zhyan8.texture;

import android.os.Bundle;

import androidx.appcompat.app.AppCompatActivity;

import com.zhyan8.texture.filament.FLSurfaceView;

public class MainActivity extends AppCompatActivity {
    private FLSurfaceView mFLSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mFLSurfaceView = new MyFLSurfaceView(this);
        setContentView(mFLSurfaceView);
        mFLSurfaceView.init();
        mFLSurfaceView.setRenderMode(FLSurfaceView.RENDERMODE_CONTINUOUSLY);
    }

    @Override
    public void onResume() {
        super.onResume();
        mFLSurfaceView.onResume();
    }

    @Override
    public void onPause() {
        super.onPause();
        mFLSurfaceView.onPause();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        mFLSurfaceView.onDestroy();
    }
}

        MyFLSurfaceView.java

package com.zhyan8.texture;

import android.content.Context;

import com.google.android.filament.Camera;
import com.zhyan8.texture.filament.BaseModel;
import com.zhyan8.texture.filament.FLSurfaceView;

public class MyFLSurfaceView extends FLSurfaceView {
    private BaseModel mMyModel;

    public MyFLSurfaceView(Context context) {
        super(context);
    }

    public void init() {
        mSkyboxColor = new float[] {0.965f, 0.941f, 0.887f, 1};
        super.init();
    }

    @Override
    public void onDestroy() {
        mMyModel.destroy();
        super.onDestroy();
    }

    @Override
    protected void setupScene() { // Configure the Scene
        mMyModel = new Square(getContext(), mEngine);
        mScene.addEntity(mMyModel.getRenderable());
    }

    @Override
    protected void onResized(int width, int height) {
        mCamera.setProjection(Camera.Projection.ORTHO, -1, 1, -1, 1, 0, 10);
    }
}

        Square.java

package com.zhyan8.texture;

import android.content.Context;

import com.google.android.filament.Box;
import com.google.android.filament.Engine;
import com.google.android.filament.RenderableManager.PrimitiveType;
import com.google.android.filament.TextureSampler;
import com.zhyan8.texture.filament.BaseModel;

public class Square extends BaseModel {
    private String materialPath = "materials/square.filamat";
    private String texturePath = "textures/1.jpg";

    private VertexPosUV[] mVertices = new VertexPosUV[] {
            new VertexPosUV(-1f, -1f, 0f, 0f, 0f), // bottom left
            new VertexPosUV(1f, -1f, 0f, 1f, 0f), // bottom right
            new VertexPosUV(1f, 1f, 0f, 1f, 1f), // top right
            new VertexPosUV(-1f, 1f, 0f, 0f, 1f) // top left
    };

    private short[] mIndex = new short[] {0, 1, 2, 0, 2, 3};

    public Square(Context context, Engine engine) {
        super(context, engine);
        init();
    }

    private void init() {
        mBox = new Box(0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.01f);
        mMaterial = loadMaterial(materialPath);
        mMaterialInstance.setParameter("mainTex", getTexture(texturePath), new TextureSampler());
        mVertexBuffer = getVertexBuffer(mVertices);
        mIndexBuffer = getIndexBuffer(mIndex);
        mRenderable = getRenderable(PrimitiveType.TRIANGLES, mIndex.length);
    }
}
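
        Square passes a default TextureSampler to setParameter. If explicit filtering or wrap modes are needed, the sampler can be configured when it is constructed. The sketch below assumes the TextureSampler(MinFilter, MagFilter, WrapMode) constructor of the Filament Java bindings; the specific choices are illustrative, not required by this project.

// Sketch: a trilinear, repeating sampler instead of the default one in Square.init().
TextureSampler sampler = new TextureSampler(
        TextureSampler.MinFilter.LINEAR_MIPMAP_LINEAR, // minification: trilinear (uses the mipmaps generated in BaseModel)
        TextureSampler.MagFilter.LINEAR,               // magnification: bilinear
        TextureSampler.WrapMode.REPEAT);               // tile the texture outside [0, 1]
mMaterialInstance.setParameter("mainTex", getTexture(texturePath), sampler);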

        square.mat

material {
    name : square,
    shadingModel : unlit, // Disables all lighting
    // Custom parameters
    parameters : [
        {
            type : sampler2d,
            name : mainTex
        }
    ],
    // Vertex attributes required by the vertex-shader inputs (MaterialVertexInputs)
    requires : [uv0]
}

fragment {
    void material(inout MaterialInputs material) {
        prepareMaterial(material); // Must be called before this function returns
        material.baseColor = texture(materialParams_mainTex, getUV0());
    }
}

        transform.bat

@echo off
setlocal enabledelayedexpansion
set "srcFolder=../src/main/materials"
set "distFolder=../src/main/assets/materials"for %%f in ("%srcFolder%\*.mat") do (set "matfile=%%~nf"matc -p mobile -a opengl -o "!matfile!.filamat" "%%f"move "!matfile!.filamat" "%distFolder%\!matfile!.filamat"
)echo Processing complete.
pause

        Note: matc.exe must be placed in the same directory as transform.bat; matc.exe is the executable produced by the build in Setting up the Filament Environment. Double-clicking transform.bat converts every mat file under /src/main/materials/ into a filamat file and moves it into /src/main/assets/materials/.

        The running result is shown below.
