使用 Gstreamer 录制 Mediasoup 出现错误

问题描述 投票:0回答:1

我正在开发一个项目,该项目使用nodejs和mediasoup制作直播流媒体广播(1对n流媒体)。 流式传输的主要程序工作正常,但我想从头到尾记录所有流。 我使用 Gstreamer 来实现此目的,因为我发现 FFMPEG 在这种情况下录制音频时存在问题。 但是当我开始录制时我遇到了这个错误

nodejs  | 0:00:08.110327248    43 0x55e55045e5e0 ERROR        rtpjitterbuffer gstrtpjitterbuffer.c:1528:gst_jitter_buffer_sink_parse_caps:<rtpjitterbuffer1> Got caps with wrong payload type (got 96, expected 111)
nodejs  | 0:00:08.110412676    43 0x55e55045e5e0 WARN         rtpjitterbuffer gstrtpjitterbuffer.c:3594:gst_rtp_jitter_buffer_chain:<rtpjitterbuffer1> No clock-rate in caps!, dropping buffer

我真的不知道如何解决这个问题,因为这是我在这个领域的第一个项目

这是我的服务器端代码:

/**
 * integrating mediasoup server with a node.js application
 */

/* Please follow mediasoup installation requirements */
/* https://mediasoup.org/documentation/v3/mediasoup/installation/ */
import express from 'express'
const app = express()

import Process from "child_process"
import https from 'httpolyglot'
import fs from 'fs'
import path from 'path'
const __dirname = path.resolve()

import { Server } from 'socket.io'
import mediasoup from 'mediasoup'
// const http = require('https');


// Recording related modules
// Importing modules using the import statement
import config from './config.js';
import FFmpeg from './ffmpeg.js';
import GStreamer from './gstreamer.js';
import Peer from './peer.js';
import {
  getPort,
  releasePort
} from './port.js';

const PROCESS_NAME = process.env.PROCESS_NAME || 'gstreamer';
const SERVER_PORT = process.env.SERVER_PORT || 3000;


// Basic liveness route.
app.get('/', (req, res) => {
  res.send('Hello from mediasoup app!')
})

// Static client files served under /sfu.
app.use('/sfu', express.static(path.join(__dirname, 'public')))

// SSL cert for HTTPS access
const options = {
  key: fs.readFileSync('./server/ssl/key.pem'),
  cert: fs.readFileSync('./server/ssl/cert.pem')
}

const httpsServer = https.createServer(options, app)
httpsServer.listen(SERVER_PORT, () => {
  console.log('listening on port: ' + SERVER_PORT)
})

// Socket.io server shares the HTTPS listener; CORS is wide open (dev setting).
const io = new Server(httpsServer, {
  cors: {
    origin: '*',
  }
});

// NOTE(review): registered after the routes above, so it only applies to routes
// added later — fine here since the earlier routes read no JSON body.
app.use(express.json());

// socket.io namespace (could represent a room?)
const peers = io.of('/mediasoup')

/**
 * Worker
 * |-> Router(s)
 *     |-> Producer Transport(s)
 *         |-> Producer
 *     |-> Consumer Transport(s)
 *         |-> Consumer 
 **/
let worker
// let router
// let producerTransport
// let consumerTransport
// let producer
// let consumer
// Per-room / per-client state:
let room_data = new Map()                 // room_id -> mediasoup Router
let producer_data = new Map()             // currently unused (writes are commented out)
let consumer_data = new Map()             // currently unused
let consuming_consumer_data = new Map()   // currently unused
let clients = new Map()                   // socket.id -> Peer

const webRtcTransport_options = {
  listenIps: [
    {
      ip: '0.0.0.0', // replace with relevant IP address
      announcedIp: 'MY_PUBLIC_IP', // NOTE(review): placeholder — must be the real public IP for ICE to work beyond localhost
    }
  ],
  enableUdp: true,
  enableTcp: true,
  preferUdp: true,
}

/**
 * Creates the mediasoup Worker (one per application in this setup) and wires
 * its 'died' handler. Assigns the module-level `worker` and returns it.
 *
 * @returns {Promise<import('mediasoup').types.Worker>} the created Worker
 * @throws rethrows any mediasoup.createWorker() failure instead of silently
 *         returning undefined (the original swallowed the error, leaving the
 *         global `worker` unusable and causing confusing downstream crashes).
 */
const createWorker = async () => {
  try {
    worker = await mediasoup.createWorker({
      // NOTE(review): only 20 RTC ports (2000-2020) — each WebRTC transport
      // consumes one, so this caps concurrent transports. Widen for production.
      rtcMinPort: 2000,
      rtcMaxPort: 2020,
    });
    console.log(`Worker PID: ${worker.pid}`);

    worker.on('died', () => {
      // mediasoup recommends exiting when the worker dies (all routers and
      // transports are gone) and letting a supervisor restart the process.
      console.error('mediasoup worker has died');
      setTimeout(() => process.exit(1), 2000);
    });

    return worker;
  } catch (error) {
    console.error('Error creating mediasoup worker:', error);
    throw error;
  }
};

// We create a Worker as soon as our application starts
// NOTE(review): createWorker() is async, so right after this line `worker`
// holds a pending Promise; the global is only reassigned to the real Worker
// inside createWorker(). A client calling 'createRoom' before that resolves
// would invoke createRouter on a Promise — confirm startup ordering.
worker = createWorker()

// This is an Array of RtpCapabilities
// https://mediasoup.org/documentation/v3/mediasoup/rtp-parameters-and-capabilities/#RtpCodecCapability
// list of media codecs supported by mediasoup ...
// https://github.com/versatica/mediasoup/blob/v3/src/supportedRtpCapabilities.ts
// const mediaCodecs = [
//   {
//     kind: 'audio',
//     mimeType: 'audio/opus',
//     clockRate: 48000,
//     channels: 2,
//   },
//   {
//     kind: 'video',
//     mimeType: 'video/VP8',
//     clockRate: 90000,
//     parameters: {
//       'x-google-start-bitrate': 1000,
//     },
//   },
// ]

// Router media codecs. The preferredPayloadType values (111 opus, 96 VP8,
// 125 H264) are chosen to line up with the payload types declared in the
// recording SDP files, so the GStreamer sdpdemux can map each RTP stream to
// the right depayloader.
const mediaCodecs = [
  {
    kind: "audio",
    mimeType: "audio/opus",
    preferredPayloadType: 111,
    clockRate: 48000,
    channels: 2,
    parameters: {
      minptime: 10,
      useinbandfec: 1,
    },
  },
  {
    kind: "video",
    mimeType: "video/VP8",
    preferredPayloadType: 96,
    clockRate: 90000,
  },
  {
    kind: "video",
    mimeType: "video/H264",
    preferredPayloadType: 125,
    clockRate: 90000,
    parameters: {
      "level-asymmetry-allowed": 1,
      "packetization-mode": 1,
      "profile-level-id": "42e01f",
    },
  },
]


// Simple health-check endpoint.
app.get('/test', function (req, res) {
  res.send('This is a test endpoint');
});


// Main signalling entry point: one Peer per socket.io connection. Each handler
// below implements one step of the mediasoup producer/consumer handshake.
peers.on('connection', async socket => {

  try {
    // Use socket id for identifying each client
    console.log(socket.id)
    const peer = new Peer(socket.id);
    clients.set(socket.id, peer);

    socket.emit('connection-success', {
      socketId: socket.id
    })

  } catch (error) {
    console.error('Failed to create new peer [error:%o]', error);
    // BUG FIX: socket.io sockets have no terminate() method (that is a `ws`
    // API); disconnect(true) closes the underlying connection.
    socket.disconnect(true);
    return;
  }

  socket.on('disconnect', () => {
    // do some cleanup
    // NOTE(review): the Peer is never removed from `clients` here — confirm
    // whether stale entries should be cleaned up on disconnect.
    console.log('peer disconnected')
  })

  // Creates a mediasoup Router for the room and returns its RTP capabilities.
  socket.on('createRoom', async ({ room_id, session_id }, callback) => {

    const room = await worker.createRouter({ mediaCodecs, })
    const client = clients.get(session_id)

    room_data.set(room_id, room);
    client.room_id = room_id;
    const params = {
      'rtpCapability': room.rtpCapabilities
    }

    // TODO => Send http request to django for changing the state of room

    callback(params)

  })

  socket.on('getRtpCapabilities', ({ room_id }, callback) => {
    const room = room_data.get(room_id)
    const rtpCapabilities = room.rtpCapabilities

    console.log('rtp Capabilities', rtpCapabilities)

    // call callback from the client and send back the rtpCapabilities
    callback({ rtpCapabilities })
  })

  // Creates one WebRTC send transport per media kind and returns the ICE/DTLS
  // parameters the browser needs to connect them.
  socket.on('createSendTransport', async ({ room_id }, callback) => {
    const client = Peer.findByRoomId(clients, room_id)
    const room = room_data.get(client.room_id)
    // create separate transports for media
    let video_transport = await room.createWebRtcTransport(webRtcTransport_options)
    let audio_transport = await room.createWebRtcTransport(webRtcTransport_options)

    console.log(`(TransportCreation) video sending transport id: ${video_transport.id}`)
    console.log(`(TransportCreation) audio sending transport id: ${audio_transport.id}`)

    video_transport.on('dtlsstatechange', dtlsState => {
      if (dtlsState === 'closed') {
        video_transport.close()
      }
    })

    audio_transport.on('dtlsstatechange', dtlsState => {
      if (dtlsState === 'closed') {
        audio_transport.close()
      }
    })

    video_transport.on('close', () => {
      console.log('video transport closed')
    })

    audio_transport.on('close', () => {
      console.log('audio transport closed')
    })

    // producer_data.set(room_id, transport);
    client.video_producer = video_transport;
    client.audio_producer = audio_transport;

    callback({
      video_id: video_transport.id,
      video_iceParameters: video_transport.iceParameters,
      video_iceCandidates: video_transport.iceCandidates,
      video_dtlsParameters: video_transport.dtlsParameters,
      audio_id: audio_transport.id,
      audio_iceParameters: audio_transport.iceParameters,
      audio_iceCandidates: audio_transport.iceCandidates,
      audio_dtlsParameters: audio_transport.dtlsParameters,
    })
  })

  // see client's socket.emit('transport-connect', ...)
  socket.on('transport-connect', async ({ room_id, dtlsParameters, kind }) => {
    console.log('DTLS PARAMS... ', { dtlsParameters })
    console.log("(Transport connection) Kind: ", kind)
    const client = Peer.findByRoomId(clients, room_id)

    if (kind == "video"){
      await client.video_producer.connect({dtlsParameters})
    }
    else if (kind == "audio"){
      await client.audio_producer.connect({dtlsParameters})
    }

  })

  // see client's socket.emit('transport-produce', ...)
  socket.on('transport-produce', async ({ room_id, kind, rtpParameters }, callback) => {
    // call produce based on the parameters from the client
    const client = Peer.findByRoomId(clients, room_id)
    let producing_producer;

    // NOTE(review): the transport stored in client.video_producer /
    // client.audio_producer is replaced below by the Producer object, so the
    // transport reference is lost after this point — confirm that is intended.
    if (kind == "video"){
      producing_producer = await client.video_producer.produce({
        kind,
        rtpParameters,
      })
      client.video_producer = producing_producer
    }
    else if (kind == "audio"){
      producing_producer = await client.audio_producer.produce({
        kind,
        rtpParameters,
      })
      client.audio_producer = producing_producer
    }

    console.log('(Transport Production) Producer ID: ', producing_producer.id, producing_producer.kind)
    // producer_data.set(room_id, producing_producer)

    producing_producer.on('transportclose', () => {
      console.log('transport for this producer closed ')
      producing_producer.close()
    })

    // Send back to the client the Producer's id
    callback({
      id: producing_producer.id
    })
  })

  // Creates one WebRTC receive transport per media kind for a viewer.
  socket.on('createRecvTransport', async ({ room_id }, callback) => {
    const client = Peer.findByRoomId(clients, room_id)
    const room = room_data.get(client.room_id)

    let video_transport = await room.createWebRtcTransport(webRtcTransport_options)
    let audio_transport = await room.createWebRtcTransport(webRtcTransport_options)

    console.log(`(TransportCreation) video recieving transport id: ${video_transport.id}`)
    console.log(`(TransportCreation) audio recieving transport id: ${audio_transport.id}`)

    video_transport.on('dtlsstatechange', dtlsState => {
      if (dtlsState === 'closed') {
        video_transport.close()
      }
    })

    audio_transport.on('dtlsstatechange', dtlsState => {
      if (dtlsState === 'closed') {
        audio_transport.close()
      }
    })

    video_transport.on('close', () => {
      console.log('video transport closed')
    })

    audio_transport.on('close', () => {
      console.log('audio transport closed')
    })

    client.video_consumer = video_transport;
    client.audio_consumer = audio_transport;

    callback({
      video_id: video_transport.id,
      video_iceParameters: video_transport.iceParameters,
      video_iceCandidates: video_transport.iceCandidates,
      video_dtlsParameters: video_transport.dtlsParameters,
      audio_id: audio_transport.id,
      audio_iceParameters: audio_transport.iceParameters,
      audio_iceCandidates: audio_transport.iceCandidates,
      audio_dtlsParameters: audio_transport.dtlsParameters,
    })
  })

  // see client's socket.emit('transport-recv-connect', ...)
  socket.on('transport-recv-connect', async ({ room_id, dtlsParameters, kind }) => {
    const client = Peer.findByRoomId(clients, room_id)
    console.log("(Transport connect) Kind: ", kind)

    if (kind == "video"){
      await client.video_consumer.connect({ dtlsParameters })
    }
    else if (kind == "audio"){
      await client.audio_consumer.connect({ dtlsParameters })
    }
  })

  // Creates paused consumers (video + audio) for a viewer and returns the RTP
  // parameters needed client-side; the viewer resumes them via 'consumer-resume'.
  socket.on('consume', async ({ room_id, rtpCapabilities }, callback) => {
    const client = Peer.findByRoomId(clients, room_id)
    const room = room_data.get(client.room_id)
    let video_consuming_consumer;
    let audio_consuming_consumer;
    let audio_params = {};
    let video_params = {};
    let params = {};

    try {
      console.log('Tying consuming video')
      if (room.canConsume(
        {
          producerId: client.video_producer.id,
          rtpCapabilities: rtpCapabilities
        }
        )) {

        console.log("Yes it can consume video");
        // Start paused per mediasoup guidance; resumed after the client-side
        // consumer exists, to avoid losing the first keyframe.
        video_consuming_consumer = await client.video_consumer.consume({
          producerId: client.video_producer.id,
          rtpCapabilities,
          paused: true,
        })
        console.log('This is video consumer: ', video_consuming_consumer);

        video_consuming_consumer.on('transportclose', () => {
          console.log('video transport close from consumer')
        })

        video_consuming_consumer.on('producerclose', () => {
          console.log('producer of video consumer closed')
        })

        client.video_consuming_consumer = video_consuming_consumer;

        // from the consumer extract the following params
        // to send back to the Client
        video_params = {
          video_id: video_consuming_consumer.id,
          video_producerId: client.video_producer.id,
          video_kind: video_consuming_consumer.kind,
          video_rtpParameters: video_consuming_consumer.rtpParameters,
        }

      }

      console.log('Tying consuming audio')
      if (room.canConsume(
        {
          producerId: client.audio_producer.id,
          rtpCapabilities: rtpCapabilities
        }
        )) {

        console.log("Yes it can consume audio");
        audio_consuming_consumer = await client.audio_consumer.consume({
          producerId: client.audio_producer.id,
          rtpCapabilities,
          paused: true,
        })
        console.log('This is audio consumer: ', audio_consuming_consumer);

        audio_consuming_consumer.on('transportclose', () => {
          console.log('audio transport close from consumer')
        })

        audio_consuming_consumer.on('producerclose', () => {
          console.log('producer of audio consumer closed')
        })

        client.audio_consuming_consumer = audio_consuming_consumer;

        // from the consumer extract the following params
        // to send back to the Client
        audio_params = {
          audio_id: audio_consuming_consumer.id,
          audio_producerId: client.audio_producer.id,
          audio_kind: audio_consuming_consumer.kind,
          audio_rtpParameters: audio_consuming_consumer.rtpParameters,
        }

      }
      // send the parameters to the client
      params = { ...video_params, ...audio_params }
      callback({ params })
    } catch (error) {
      console.log('Catching consume')
      console.log(error.message)
      callback({
        params: {
          error: error
        }
      })
    }
  })

  socket.on('consumer-resume', async ({ room_id }) => {
    const client = Peer.findByRoomId(clients, room_id)

    console.log('consumer resume')
    await client.video_consuming_consumer.resume()
    await client.audio_consuming_consumer.resume()
    console.log('Resumed successfuly')
  })

  socket.on('start-recording', async ({ room_id }) => {
    console.log("Start recording function gets called...")
    handleStartRecording(room_id)
  })

  socket.on('stop-recording', async ({ room_id }) => {
    console.log("Stop recording function gets called...")
    handleStopRecording(room_id)
  })

})


// const createTransport = async (transportType, room, options) => {
//   console.log('createTransport() [type:%s. options:%o]', transportType, options);

//   switch (transportType) {
//     case 'webRtc':
//       return await room.createWebRtcTransport(config.webRtcTransport);
//     case 'plain':
//       return await room.createPlainTransport(config.plainRtpTransport);
//   }
// };


// const publishProducerRtpStream = async (client, producer) => {
//   console.log('publishProducerRtpStream()');
//   const room = room_data.get(client.room_id);

//   // Create the mediasoup RTP Transport used to send media to the GStreamer process
//   const rtpTransportConfig = config.plainRtpTransport;

//   // If the process is set to GStreamer set rtcpMux to false
//   if (PROCESS_NAME === 'GStreamer') {
//     rtpTransportConfig.rtcpMux = false;
//   }

//   const rtpTransport = await createTransport('plain', room, rtpTransportConfig);

//   // Set the receiver RTP ports
//   const remoteRtpPort = await getPort();
//   client.remotePorts.push(remoteRtpPort);

//   let remoteRtcpPort;
//   // If rtpTransport rtcpMux is false also set the receiver RTCP ports
//   if (!rtpTransportConfig.rtcpMux) {
//     remoteRtcpPort = await getPort();
//     client.remotePorts.push(remoteRtcpPort);
//   }


//   // Connect the mediasoup RTP transport to the ports used by GStreamer
//   await rtpTransport.connect({
//     ip: '127.0.0.1',
//     port: remoteRtpPort,
//     rtcpPort: remoteRtcpPort
//   });

//   client.addTransport(rtpTransport);

//   const codecs = [];
//   // Codec passed to the RTP Consumer must match the codec in the Mediasoup router rtpCapabilities
//   const routerCodec = room.rtpCapabilities.codecs.find(
//     codec => codec.kind === producer.kind
//   );
//   codecs.push(routerCodec);

//   const rtpCapabilities = {
//     codecs,
//     rtcpFeedback: []
//   };

//   // Start the consumer paused
//   // Once the gstreamer process is ready to consume resume and send a keyframe
//   const rtpConsumer = await rtpTransport.consume({
//     producerId: producer.id,
//     rtpCapabilities,
//     paused: true
//   });

//   client.record_consumer = rtpConsumer;

//   return {
//     remoteRtpPort,
//     remoteRtcpPort,
//     localRtcpPort: rtpTransport.rtcpTuple ? rtpTransport.rtcpTuple.localPort : undefined,
//     rtpCapabilities,
//     rtpParameters: rtpConsumer.rtpParameters
//   };
// };


/**
 * Returns the recorder wrapper matching PROCESS_NAME.
 *
 * BUG FIX: the comparison was case-sensitive ('GStreamer'/'FFmpeg') while
 * PROCESS_NAME defaults to lowercase 'gstreamer' (and the switch in
 * handleStartRecording also uses lowercase), so the GStreamer branch could
 * never be reached with the default configuration. Compare case-insensitively.
 *
 * @param {object} recordInfo per-kind RTP info consumed by the wrapper
 * @returns {GStreamer|FFmpeg} recorder process wrapper (FFmpeg is the default)
 */
const getProcess = (recordInfo) => {
  switch (PROCESS_NAME.toLowerCase()) {
    case 'gstreamer':
      return new GStreamer(recordInfo);
    case 'ffmpeg':
    default:
      return new FFmpeg(recordInfo);
  }
};


/**
 * Starts recording a room: creates one mediasoup PlainTransport per media
 * kind, connects each to the recorder's RTP/RTCP ports, creates paused RTP
 * consumers, launches the recorder process, then resumes the consumers.
 *
 * BUG FIX (the reported GStreamer error): the VIDEO transport was connected
 * to config.recording.audioPort/audioPortRtcp, so VP8/H264 packets (PT 96/125)
 * arrived on the recorder's AUDIO port where opus (PT 111) was expected —
 * hence "Got caps with wrong payload type (got 96, expected 111)". The video
 * transport must use the video ports that match the SDP's m=video section
 * (5006/5007 in the posted SDP files).
 *
 * @param {string} room_id id of the room whose producers should be recorded
 */
async function handleStartRecording(room_id) {
  const client = Peer.findByRoomId(clients, room_id)
  const room = room_data.get(client.room_id)

  const useAudio = true;
  const useVideo = true;

  // Start mediasoup's RTP consumer(s)

  if (useAudio) {
    const rtpTransport = await room.createPlainTransport(config.plainRtpTransport);
    client.audio_recording_transport = rtpTransport;

    // Audio goes to the recorder's audio ports (SDP m=audio 5004 / rtcp 5005).
    await rtpTransport.connect({
      ip: config.recording.ip,
      port: config.recording.audioPort,
      rtcpPort: config.recording.audioPortRtcp,
    });

    console.log(
      "mediasoup AUDIO RTP SEND transport connected: %s:%d <--> %s:%d (%s)",
      rtpTransport.tuple.localIp,
      rtpTransport.tuple.localPort,
      rtpTransport.tuple.remoteIp,
      rtpTransport.tuple.remotePort,
      rtpTransport.tuple.protocol
    );

    console.log(
      "mediasoup AUDIO RTCP SEND transport connected: %s:%d <--> %s:%d (%s)",
      rtpTransport.rtcpTuple.localIp,
      rtpTransport.rtcpTuple.localPort,
      rtpTransport.rtcpTuple.remoteIp,
      rtpTransport.rtcpTuple.remotePort,
      rtpTransport.rtcpTuple.protocol
    );

    const rtpConsumer = await rtpTransport.consume({
      producerId: client.audio_producer.id,
      rtpCapabilities: room.rtpCapabilities, // Assume the recorder supports same formats as mediasoup's router
      paused: true,
    });
    client.audio_recording_consumer = rtpConsumer;

    console.log(
      "mediasoup AUDIO RTP SEND consumer created, kind: %s, type: %s, paused: %s, SSRC: %s CNAME: %s",
      rtpConsumer.kind,
      rtpConsumer.type,
      rtpConsumer.paused,
      rtpConsumer.rtpParameters.encodings[0].ssrc,
      rtpConsumer.rtpParameters.rtcp.cname
    );
  }

  if (useVideo) {
    const rtpTransport = await room.createPlainTransport(config.plainRtpTransport);
    client.video_recording_transport = rtpTransport;

    // BUG FIX: was audioPort/audioPortRtcp. Video must go to the video ports
    // matching the SDP's m=video section (5006/5007).
    // TODO(review): confirm config.recording defines videoPort/videoPortRtcp.
    await rtpTransport.connect({
      ip: config.recording.ip,
      port: config.recording.videoPort,
      rtcpPort: config.recording.videoPortRtcp,
    });

    console.log(
      "mediasoup VIDEO RTP SEND transport connected: %s:%d <--> %s:%d (%s)",
      rtpTransport.tuple.localIp,
      rtpTransport.tuple.localPort,
      rtpTransport.tuple.remoteIp,
      rtpTransport.tuple.remotePort,
      rtpTransport.tuple.protocol
    );

    console.log(
      "mediasoup VIDEO RTCP SEND transport connected: %s:%d <--> %s:%d (%s)",
      rtpTransport.rtcpTuple.localIp,
      rtpTransport.rtcpTuple.localPort,
      rtpTransport.rtcpTuple.remoteIp,
      rtpTransport.rtcpTuple.remotePort,
      rtpTransport.rtcpTuple.protocol
    );

    const rtpConsumer = await rtpTransport.consume({
      producerId: client.video_producer.id,
      rtpCapabilities: room.rtpCapabilities, // Assume the recorder supports same formats as mediasoup's router
      paused: true,
    });
    client.video_recording_consumer = rtpConsumer;

    console.log(
      "mediasoup VIDEO RTP SEND consumer created, kind: %s, type: %s, paused: %s, SSRC: %s CNAME: %s",
      rtpConsumer.kind,
      rtpConsumer.type,
      rtpConsumer.paused,
      rtpConsumer.rtpParameters.encodings[0].ssrc,
      rtpConsumer.rtpParameters.rtcp.cname
    );
  }

  // ----

  // Launch the recorder; startRecordingGstreamer resolves once the GStreamer
  // pipeline reaches PLAYING, so resuming the consumers below is safe.
  switch (PROCESS_NAME) {
    case "ffmpeg":
      await startRecordingFfmpeg();
      break;
    case "gstreamer":
      await startRecordingGstreamer(client);
      break;
    case "external":
      await startRecordingExternal();
      break;
    default:
      console.warn("Invalid recorder:", PROCESS_NAME);
      break;
  }

  if (useAudio) {
    const consumer = client.audio_recording_consumer;
    console.log(
      "Resume mediasoup RTP consumer, kind: %s, type: %s",
      consumer.kind,
      consumer.type
    );
    // FIX: await the resume so errors surface here instead of floating.
    await consumer.resume();
  }
  if (useVideo) {
    const consumer = client.video_recording_consumer;
    console.log(
      "Resume mediasoup RTP consumer, kind: %s, type: %s",
      consumer.kind,
      consumer.type
    );
    await consumer.resume();
    // Ask the producer for a keyframe so the recording starts with a
    // decodable frame instead of waiting for the next natural keyframe.
    await consumer.requestKeyFrame();
  }
}

// Placeholder recorder back-ends; only the GStreamer path is implemented.
async function startRecordingFfmpeg() {}
async function startRecordingExternal() {}

/**
 * Spawns a gst-launch-1.0 pipeline that reads the recording SDP file
 * (sdpdemux), depayloads the RTP audio/video branches, and muxes them into a
 * webm (VP8) or mp4 (H264) file.
 *
 * BUG FIX: the returned Promise only ever resolved (on "Setting pipeline to
 * PLAYING" in stdout). If spawning failed — e.g. gst-launch-1.0 not on $PATH —
 * handleStartRecording awaited forever. The Promise now rejects on a spawn
 * error.
 *
 * @param {Peer} client peer whose recording process handle is stored on
 *   client.process
 * @returns {Promise<void>} resolves ~1s after the pipeline reaches PLAYING;
 *   rejects if the process cannot be spawned
 */
function startRecordingGstreamer(client) {
  // Return a Promise that can be awaited
  let recResolve;
  let recReject;
  const promise = new Promise((res, rej) => {
    recResolve = res;
    recReject = rej;
  });

  const useAudio = true;
  const useVideo = true;
  const useH264 = true;

  // Defaults target the VP8 SDP/webm output; overridden below when useH264.
  let cmdInputPath = `${__dirname}/recording/input-vp8.sdp`;
  let cmdOutputPath = `${__dirname}/recording/output-gstreamer-vp8.webm`;
  let cmdMux = "webmmux";
  let cmdAudioBranch = "";
  let cmdVideoBranch = "";

  if (useAudio) {
    // prettier-ignore
    cmdAudioBranch =
      "demux. ! queue \
      ! rtpopusdepay \
      ! opusparse \
      ! mux.";
  }

  if (useVideo) {
    if (useH264) {
      cmdInputPath = `${__dirname}/recording/input-h264.sdp`;
      cmdOutputPath = `${__dirname}/recording/output-gstreamer-h264.mp4`;
      cmdMux = `mp4mux faststart=true faststart-file=${cmdOutputPath}.tmp`;

      // prettier-ignore
      cmdVideoBranch =
        "demux. ! queue \
        ! rtph264depay \
        ! h264parse \
        ! mux.";
    } else {
      // prettier-ignore
      cmdVideoBranch =
        "demux. ! queue \
        ! rtpvp8depay \
        ! mux.";
    }
  }

  // Run process
  const cmdEnv = {
    GST_DEBUG: config.gstreamer.logLevel,
    ...process.env, // This allows overriding $GST_DEBUG from the shell
  };
  const cmdProgram = "gst-launch-1.0"; // Found through $PATH
  const cmdArgStr = [
    "--eos-on-shutdown",
    `filesrc location=${cmdInputPath}`,
    "! sdpdemux timeout=0 name=demux",
    `${cmdMux} name=mux`,
    `! filesink location=${cmdOutputPath}`,
    cmdAudioBranch,
    cmdVideoBranch,
  ]
    .join(" ")
    .trim();

  console.log(
    `Run command: GST_DEBUG=${cmdEnv.GST_DEBUG} ${cmdProgram} ${cmdArgStr}`
  );

  let recProcess = Process.spawn(cmdProgram, cmdArgStr.split(/\s+/), {
    env: cmdEnv,
  });
  client.process = recProcess;

  recProcess.on("error", (err) => {
    console.error("Recording process error:", err);
    // Fail the start-up promise so callers don't await forever when
    // gst-launch-1.0 is missing or cannot be spawned.
    recReject(err);
  });

  recProcess.on("exit", (code, signal) => {
    console.log("Recording process exit, code: %d, signal: %s", code, signal);

    client.process = null;
    stopMediasoupRtp(client);

    if (!signal || signal === "SIGINT") {
      console.log("Recording stopped");
    } else {
      console.warn(
        "Recording process didn't exit cleanly, output file might be corrupt"
      );
    }
  });

  // GStreamer writes some initial logs to stdout
  recProcess.stdout.on("data", (chunk) => {
    chunk
      .toString()
      .split(/\r?\n/g)
      .filter(Boolean) // Filter out empty strings
      .forEach((line) => {
        console.log(line);
        if (line.startsWith("Setting pipeline to PLAYING")) {
          // Give the pipeline a moment to settle before consumers resume.
          setTimeout(() => {
            recResolve();
          }, 1000);
        }
      });
  });

  // GStreamer writes its progress logs to stderr
  recProcess.stderr.on("data", (chunk) => {
    chunk
      .toString()
      .split(/\r?\n/g)
      .filter(Boolean) // Filter out empty strings
      .forEach((line) => {
        console.log(line);
      });
  });

  return promise;
}


/**
 * Stops an active recording for a room.
 *
 * BUG FIX: the 'stop-recording' socket handler calls this with a room_id, but
 * the original treated the argument as a client object, so `client.process`
 * was always undefined and the GStreamer process was never signalled. Resolve
 * the Peer from the room_id here.
 *
 * @param {string} room_id id of the room whose recording should stop
 */
async function handleStopRecording(room_id) {
  const client = Peer.findByRoomId(clients, room_id);
  if (!client) {
    console.warn("handleStopRecording: no client found for room", room_id);
    return;
  }

  if (client.process) {
    // SIGINT + --eos-on-shutdown lets GStreamer flush and finalize the file;
    // the process 'exit' handler then tears down the RTP transports.
    client.process.kill("SIGINT");
  } else {
    stopMediasoupRtp(client);
  }
}


/**
 * Closes the recording RTP consumers and PlainTransports for a client.
 *
 * FIX: the original threw a TypeError when called before a recording had ever
 * started (e.g. stop-recording without start-recording, or a recorder process
 * exiting early), because the consumer/transport fields were undefined. Use
 * optional chaining so cleanup is a safe no-op in that case.
 *
 * @param {object} client peer holding *_recording_consumer / *_recording_transport
 */
function stopMediasoupRtp(client) {
  console.log("Stop mediasoup RTP transport and consumer");

  const useAudio = true;
  const useVideo = true;

  if (useAudio) {
    client.audio_recording_consumer?.close();
    client.audio_recording_transport?.close();
  }

  if (useVideo) {
    client.video_recording_consumer?.close();
    client.video_recording_transport?.close();
  }
}


// const startRecord = async (room_id) => {
//   let recordInfo = {};
//   const client = Peer.findByRoomId(clients, room_id)

//   recordInfo[client.video_producer.kind] = await publishProducerRtpStream(client, client.video_producer);
//   recordInfo[client.audio_producer.kind] = await publishProducerRtpStream(client, client.audio_producer);
//   recordInfo.fileName = Date.now().toString();

//   client.process = getProcess(recordInfo);

//   setTimeout(async () => {
//       // Sometimes the consumer gets resumed before the GStreamer process has fully started
//       // so wait a couple of seconds

//       await client.video_consuming_consumer.resume();
//       await client.video_consuming_consumer.requestKeyFrame();
//       await client.audio_consuming_consumer.resume();
//       await client.audio_consuming_consumer.requestKeyFrame();
//       await client.record_consumer.resume();
//       await client.record_consumer.requestKeyFrame();
//   }, 1000);
// };


// const stopRecoding = async (room_id) => {
//   const client = Peer.findByRoomId(clients, room_id)

//   if (!client) {
//     throw new Error(`Peer with id ${room_id} was not found`);
//   }

//   if (!client.process) {
//     throw new Error(`Peer with id ${room_id} is not recording`);
//   }

//   client.process.kill();
//   client.process = undefined;

//   // Release ports from port set
//   for (const remotePort of client.remotePorts) {
//     releasePort(remotePort);
//   }
// };

这是我的 H264 编解码器的 sdp 配置:

v=0
o=- 0 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=audio 5004 RTP/AVPF 111
a=rtcp:5005
a=rtpmap:111 opus/48000/2
a=fmtp:111 minptime=10;useinbandfec=1
m=video 5006 RTP/AVPF 125
a=rtcp:5007
a=rtpmap:125 H264/90000
a=fmtp:125 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f

这是我的 VP8 编解码器的 sdp 配置:

v=0
o=- 0 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=audio 5004 RTP/AVPF 111
a=rtcp:5005
a=rtpmap:111 opus/48000/2
a=fmtp:111 minptime=10;useinbandfec=1
m=video 5006 RTP/AVPF 96
a=rtcp:5007
a=rtpmap:96 VP8/90000

感谢您提前的帮助

node.js gstreamer live-streaming recording mediasoup
1个回答
0
投票

你能解决你的问题吗? 我也有和你一样的问题!

0:00:11.659570863 486226      0x5ececc0 ERROR        rtpjitterbuffer gstrtpjitterbuffer.c:1401:gst_jitter_buffer_sink_parse_caps:<jitterbuffer> Got caps with wrong payload type (got 127, expected 101)
© www.soinside.com 2019 - 2024. All rights reserved.