我正在 Flutter 中集成第三方 Android SDK，并且我希望在 SDK 启动时将一条消息从 Android 端传递到 Flutter 端。
我已经使用平台通道（platform channel）接入了该 SDK，只需要处理回调代码即可。在代码中有一个名为 onChannelJoin 的回调函数，我想在该函数被调用时向 Flutter 端发送一条消息。
主要活动
public class MainActivity extends FlutterActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); GeneratedPluginRegistrant.registerWith(this); final String CHANNEL = "samples.flutter.io/screen_record"; new MethodChannel(getFlutterView(), CHANNEL).setMethodCallHandler( new MethodChannel.MethodCallHandler() { @Override public void onMethodCall(MethodCall call, MethodChannel.Result result) { // TODO if (call.method.equals("startScreenShare")) { Intent intent = new Intent(MainActivity.this , HelloAgoraScreenSharingActivity.class); startActivity(intent); } else { result.notImplemented(); } } }); } }
ScreenSharingActivity
public class HelloAgoraScreenSharingActivity extends Activity { private static final String LOG_TAG = "AgoraScreenSharing"; private static final int PERMISSION_REQ_ID_RECORD_AUDIO = 22; private ScreenCapture mScreenCapture; private GLRender mScreenGLRender; private RtcEngine mRtcEngine; private boolean mIsLandSpace = false; private void initModules() { DisplayMetrics metrics = new DisplayMetrics(); getWindowManager().getDefaultDisplay().getMetrics(metrics); if (mScreenGLRender == null) { mScreenGLRender = new GLRender(); } if (mScreenCapture == null) { mScreenCapture = new ScreenCapture(getApplicationContext(), mScreenGLRender, metrics.densityDpi); } mScreenCapture.mImgTexSrcConnector.connect(new SinkConnector<ImgTexFrame>() { @Override public void onFormatChanged(Object obj) { Log.d(LOG_TAG, "onFormatChanged " + obj.toString()); } @Override public void onFrameAvailable(ImgTexFrame frame) { Log.d(LOG_TAG, "onFrameAvailable " + frame.toString()); if (mRtcEngine == null) { return; } AgoraVideoFrame vf = new AgoraVideoFrame(); vf.format = AgoraVideoFrame.FORMAT_TEXTURE_OES; vf.timeStamp = frame.pts; vf.stride = frame.mFormat.mWidth; vf.height = frame.mFormat.mHeight; vf.textureID = frame.mTextureId; vf.syncMode = true; vf.eglContext14 = mScreenGLRender.getEGLContext(); vf.transform = frame.mTexMatrix; mRtcEngine.pushExternalVideoFrame(vf); } }); mScreenCapture.setOnScreenCaptureListener(new ScreenCapture.OnScreenCaptureListener() { @Override public void onStarted() { Log.d(LOG_TAG, "Screen Record Started"); } @Override public void onError(int err) { Log.d(LOG_TAG, "onError " + err); switch (err) { case ScreenCapture.SCREEN_ERROR_SYSTEM_UNSUPPORTED: break; case ScreenCapture.SCREEN_ERROR_PERMISSION_DENIED: break; } } }); WindowManager wm = (WindowManager) getApplicationContext() .getSystemService(Context.WINDOW_SERVICE); int screenWidth = wm.getDefaultDisplay().getWidth(); int screenHeight = wm.getDefaultDisplay().getHeight(); if ((mIsLandSpace && screenWidth < 
screenHeight) || (!mIsLandSpace) && screenWidth > screenHeight) { screenWidth = wm.getDefaultDisplay().getHeight(); screenHeight = wm.getDefaultDisplay().getWidth(); } setOffscreenPreview(screenWidth, screenHeight); if (mRtcEngine == null) { try { mRtcEngine = RtcEngine.create(getApplicationContext(), "Agora_id", new IRtcEngineEventHandler() { @Override public void onJoinChannelSuccess(String channel, int uid, int elapsed) { Log.d(LOG_TAG, "onJoinChannelSuccess " + channel + " " + elapsed); } @Override public void onWarning(int warn) { Log.d(LOG_TAG, "onWarning " + warn); } @Override public void onError(int err) { Log.d(LOG_TAG, "onError " + err); } @Override public void onAudioRouteChanged(int routing) { Log.d(LOG_TAG, "onAudioRouteChanged " + routing); } }); } catch (Exception e) { Log.e(LOG_TAG, Log.getStackTraceString(e)); throw new RuntimeException("NEED TO check rtc sdk init fatal error\n" + Log.getStackTraceString(e)); } mRtcEngine.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING); mRtcEngine.enableVideo(); if (mRtcEngine.isTextureEncodeSupported()) { mRtcEngine.setExternalVideoSource(true, true, true); } else { throw new RuntimeException("Can not work on device do not supporting texture" + mRtcEngine.isTextureEncodeSupported()); } mRtcEngine.setVideoProfile(Constants.VIDEO_PROFILE_360P, true); mRtcEngine.setClientRole(Constants.CLIENT_ROLE_BROADCASTER); } } private void deInitModules() { RtcEngine.destroy(); mRtcEngine = null; if (mScreenCapture != null) { mScreenCapture.release(); mScreenCapture = null; } if (mScreenGLRender != null) { mScreenGLRender.quit(); mScreenGLRender = null; } } /** * Set offscreen preview. 
* * @param width offscreen width * @param height offscreen height * @throws IllegalArgumentException */ public void setOffscreenPreview(int width, int height) throws IllegalArgumentException { if (width <= 0 || height <= 0) { throw new IllegalArgumentException("Invalid offscreen resolution"); } mScreenGLRender.init(width, height); } private void startCapture() { mScreenCapture.start(); } private void stopCapture() { mScreenCapture.stop(); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_hello_agora_screen_sharing); } public void onLiveSharingScreenClicked(View view) { Button button = (Button) view; boolean selected = button.isSelected(); button.setSelected(!selected); if (button.isSelected()) { initModules(); startCapture(); String channel = "ss_test" + System.currentTimeMillis(); channel = "ss_test"; button.setText("stop"); mRtcEngine.muteAllRemoteAudioStreams(true); mRtcEngine.muteAllRemoteVideoStreams(true); mRtcEngine.joinChannel(null, channel, "", 0); } else { button.setText("start"); mRtcEngine.leaveChannel(); stopCapture(); } } @Override protected void onDestroy() { super.onDestroy(); deInitModules(); } }
飞镖代码
// Invokes the Android side over the platform channel; the channel name must
// match the one registered in MainActivity.
// BUG FIX: removed the stray "<<" paste artifact that made the snippet
// syntactically invalid, and the empty catch that silently swallowed errors.
const platform = const MethodChannel('samples.flutter.io/screen_record');
try {
  final int result = await platform.invokeMethod('startScreenShare');
} on PlatformException catch (e) {
  // Surface the failure instead of ignoring it silently.
  print('startScreenShare failed: ${e.message}');
}
setState(() {});
这里是您的问题的解决方案。我也遇到过同样的问题，当时在网上没有找到任何现成的方案。经过大量搜索后，我使用 startActivityForResult 解决了它，详见：https://www.linkedin.com/pulse/solution-major-flutterdev-challenge-retrieving-data-from-okare-shaba