Re: dvb-s h264 channels with standard definition

Hi,

Igor wrote:

> There are several H.264 channels in DVB-S modulation with standard definition.
> For example, 20 Russian channels on 40E:
> http://www.lyngsat.com/eam1.html
> 
> and two channels on 16E:
> http://www.lyngsat.com/ew2.html
> 
> How is it possible to watch them with VDR?

The next vdr-xine release will support it. You may want to try the
attached patch for VDR 1.5.x, which prepares cRemux for H.264 (most
likely one of the next VDR releases will contain this patch). After that
you should be able to record such channels. Be aware that channels.conf
may no longer be compatible with unpatched VDR versions.
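
To give a rough idea of what to expect (the PID value below is just an
example), the patch uses a VPID_OFFSET_BASE of 10000, so an H.264
channel whose real video PID is 512 will show up in channels.conf with
a VPID of 10512:

	...:10512:...

while MPEG-1/2 channels keep their plain VPID.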

The recordings should already be playable with xine-lib-1.1.7:

	xine .../001.vdr#demux:mpeg_pes

Be aware that xine-lib's H.264 support is based on ffmpeg and there are
known issues with interlaced material. So it is likely that xine will crash.

In case you are able to create a recording, would you please be so kind
as to provide me with a sample for testing?

Bye.
-- 
Dipl.-Inform. (FH) Reinhard Nissl
mailto:rnissl@xxxxxx
diff -Nurp ../vdr-1.5.2-orig/channels.h ./channels.h
--- ../vdr-1.5.2-orig/channels.h	2007-01-05 11:37:35.000000000 +0100
+++ ./channels.h	2007-05-06 21:00:53.000000000 +0200
@@ -47,6 +47,16 @@
 #define CA_ENCRYPTED_MIN 0x0100
 #define CA_ENCRYPTED_MAX 0xFFFF
 
+// VPID can be in the range 0...8191. Offsets of 10000 are used to indicate special video codings.
+#define VPID_OFFSET_BASE         10000
+#define VPID_FROM_ANY(pid)       ((pid) % VPID_OFFSET_BASE) // returns the plain VPID
+#define VPID_TO_XXX(pid, offset) ((pid) + (offset))
+#define VPID_IS_XXX(pid, offset) (((pid) - VPID_FROM_ANY(pid)) == (offset))
+// 1. special video coding: H.264
+#define VPID_OFFSET_H264         (1 * VPID_OFFSET_BASE)
+#define VPID_TO_H264(pid)        VPID_TO_XXX(pid, VPID_OFFSET_H264)
+#define VPID_IS_H264(pid)        VPID_IS_XXX(pid, VPID_OFFSET_H264)
+
 struct tChannelParameterMap {
   int userValue;
   int driverValue;
diff -Nurp ../vdr-1.5.2-orig/dvbplayer.c ./dvbplayer.c
--- ../vdr-1.5.2-orig/dvbplayer.c	2006-04-17 14:45:48.000000000 +0200
+++ ./dvbplayer.c	2007-05-06 21:00:53.000000000 +0200
@@ -712,7 +712,8 @@ void cDvbPlayer::Goto(int Index, bool St
               b[r++] = 0x00;
               b[r++] = 0x00;
               b[r++] = 0x01;
-              b[r++] = 0xB7;
+              b[r] = (cRemux::IsFrameH264(b, r) ? 10 : 0xB7);
+              r++;
               }
            DeviceStillPicture(b, r);
            }
diff -Nurp ../vdr-1.5.2-orig/menu.c ./menu.c
--- ../vdr-1.5.2-orig/menu.c	2007-02-25 15:04:33.000000000 +0100
+++ ./menu.c	2007-05-06 21:01:18.000000000 +0200
@@ -218,6 +218,7 @@ class cMenuEditChannel : public cOsdMenu
 private:
   cChannel *channel;
   cChannel data;
+  int vcodec;
   char name[256];
   void Setup(void);
 public:
@@ -231,6 +232,9 @@ cMenuEditChannel::cMenuEditChannel(cChan
   channel = Channel;
   if (channel) {
      data = *channel;
+     vcodec = data.vpid / VPID_OFFSET_BASE;
+     data.vpid = VPID_FROM_ANY(data.vpid);
+
      if (New) {
         channel = NULL;
         data.nid = 0;
@@ -249,20 +253,26 @@ void cMenuEditChannel::Setup(void)
 
   Clear();
 
+  static const char *VideoCodecValues[] = {
+    tr("MPEG1/2"),
+    tr("H.264")
+    };
+
   // Parameters for all types of sources:
   strn0cpy(name, data.name, sizeof(name));
   Add(new cMenuEditStrItem( tr("Name"),          name, sizeof(name), tr(FileNameChars)));
   Add(new cMenuEditSrcItem( tr("Source"),       &data.source));
   Add(new cMenuEditIntItem( tr("Frequency"),    &data.frequency));
-  Add(new cMenuEditIntItem( tr("Vpid"),         &data.vpid,  0, 0x1FFF));
-  Add(new cMenuEditIntItem( tr("Ppid"),         &data.ppid,  0, 0x1FFF));
+  Add(new cMenuEditIntItem( tr("Vpid"),         &data.vpid,     0, 0x1FFF));
+  Add(new cMenuEditStraItem( tr("Vcodec"),      &vcodec, sizeof(VideoCodecValues) / sizeof(*VideoCodecValues), VideoCodecValues));
+  Add(new cMenuEditIntItem( tr("Ppid"),         &data.ppid,     0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Apid1"),        &data.apids[0], 0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Apid2"),        &data.apids[1], 0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Dpid1"),        &data.dpids[0], 0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Dpid2"),        &data.dpids[1], 0, 0x1FFF));
-  Add(new cMenuEditIntItem( tr("Tpid"),         &data.tpid,  0, 0x1FFF));
+  Add(new cMenuEditIntItem( tr("Tpid"),         &data.tpid,     0, 0x1FFF));
   Add(new cMenuEditCaItem(  tr("CA"),           &data.caids[0]));
-  Add(new cMenuEditIntItem( tr("Sid"),          &data.sid, 1, 0xFFFF));
+  Add(new cMenuEditIntItem( tr("Sid"),          &data.sid,      1, 0xFFFF));
   /* XXX not yet used
   Add(new cMenuEditIntItem( tr("Nid"),          &data.nid, 0));
   Add(new cMenuEditIntItem( tr("Tid"),          &data.tid, 0));
@@ -294,12 +304,14 @@ eOSState cMenuEditChannel::ProcessKey(eK
         if (Channels.HasUniqueChannelID(&data, channel)) {
            data.name = strcpyrealloc(data.name, name);
            if (channel) {
+              data.vpid += vcodec * VPID_OFFSET_BASE; 
               *channel = data;
               isyslog("edited channel %d %s", channel->Number(), *data.ToText());
               state = osBack;
               }
            else {
               channel = new cChannel;
+              data.vpid += vcodec * VPID_OFFSET_BASE; 
               *channel = data;
               Channels.Add(channel);
               Channels.ReNumber();
diff -Nurp ../vdr-1.5.2-orig/pat.c ./pat.c
--- ../vdr-1.5.2-orig/pat.c	2007-01-05 11:41:55.000000000 +0100
+++ ./pat.c	2007-05-06 21:00:53.000000000 +0200
@@ -338,6 +338,10 @@ void cPatFilter::Process(u_short Pid, u_
         int NumDpids = 0;
         for (SI::Loop::Iterator it; pmt.streamLoop.getNext(stream, it); ) {
             switch (stream.getStreamType()) {
+              case 0x19: // advanced codec HD digital television service
+              case 0x1b: // ISO/IEC 14496-10 Video (MPEG-4 part 10/AVC, aka H.264)
+                      Vpid = VPID_TO_H264(stream.getPid());
+                      break;
               case 1: // STREAMTYPE_11172_VIDEO
               case 2: // STREAMTYPE_13818_VIDEO
                       Vpid = stream.getPid();
diff -Nurp ../vdr-1.5.2-orig/recorder.c ./recorder.c
--- ../vdr-1.5.2-orig/recorder.c	2007-02-24 17:36:24.000000000 +0100
+++ ./recorder.c	2007-05-06 21:00:53.000000000 +0200
@@ -127,7 +127,7 @@ void cFileWriter::Action(void)
 // --- cRecorder -------------------------------------------------------------
 
 cRecorder::cRecorder(const char *FileName, tChannelID ChannelID, int Priority, int VPid, const int *APids, const int *DPids, const int *SPids)
-:cReceiver(ChannelID, Priority, VPid, APids, Setup.UseDolbyDigital ? DPids : NULL, SPids)
+:cReceiver(ChannelID, Priority, VPID_FROM_ANY(VPid), APids, Setup.UseDolbyDigital ? DPids : NULL, SPids)
 ,cThread("recording")
 {
   // Make sure the disk is up and running:
diff -Nurp ../vdr-1.5.2-orig/remux.c ./remux.c
--- ../vdr-1.5.2-orig/remux.c	2007-02-24 17:36:10.000000000 +0100
+++ ./remux.c	2007-05-06 21:00:53.000000000 +0200
@@ -164,7 +164,7 @@ protected:
   uint32_t localScanner;
   int localStart;
   bool PushOutPacket(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count);
-  virtual int QuerySnoopSize() { return 4; }
+  virtual int QuerySnoopSize(void) { return 4; }
   virtual void Reset(void);
   };
 
@@ -238,6 +238,556 @@ bool cCommonRepacker::PushOutPacket(cRin
   return true;
 }
 
+namespace H264
+{
+  // --- cException ----------------------------------------------------------
+
+  class cException {
+  private:
+    cString message;
+  public:
+    cException(const cString &Message) { message = Message; }
+    const cString &Message(void) const { return message; }
+  };
+
+  // --- cBitReader ----------------------------------------------------------
+
+  class cBitReader {
+  private:
+    uint8_t *data;
+    int count;
+    uint32_t bits;
+    uint32_t bitsAvail;
+    int countZeros;
+    uint8_t NextByte(void);
+    uint32_t ReadBits(uint32_t n);
+  public:
+    cBitReader(uint8_t *Data, int Count);
+    uint32_t u(uint32_t n) { return ReadBits(n); } // read n bits as unsigned number
+    uint32_t ue(void); // read Exp-Golomb coded unsigned number
+    int32_t se(void); // read Exp-Golomb coded signed number
+  };
+
+  cBitReader::cBitReader(unsigned char *Data, int Count)
+  {
+    data = Data;
+    count = Count;
+    bitsAvail = 0;
+    countZeros = 0;
+  }
+
+  inline uint8_t cBitReader::NextByte(void)
+  {
+    if (count < 1) // there is no more data left in this NAL unit
+       throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+    // detect 00 00 00, 00 00 01 and 00 00 03 and handle them
+    if (*data == 0x00) {
+       if (countZeros >= 3) // 00 00 00: the current NAL unit should have been terminated already before this sequence
+          throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+       // increase the zero counter as we have a zero byte
+       countZeros++;
+       }
+    else {
+       if (countZeros >= 2) {
+          if (*data == 0x01) // 00 00 01: the current NAL unit should have been terminated already before this sequence
+             throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+          if (*data == 0x03) {
+             // 00 00 03 xx: the emulation prevention byte 03 needs to be removed and xx must be returned
+             if (count < 2)
+                throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+             // drop 03 and xx will be returned below
+             count--;
+             data++;
+             }
+          }
+       // reset the zero counter as we had a non zero byte
+       countZeros = 0;
+       }
+    count--;
+    return *data++;
+  }
+
+  inline uint32_t cBitReader::ReadBits(uint32_t n)
+  {
+    // fill the "shift register" bits with sufficient data
+    while (n > bitsAvail) {
+          bits <<= 8;
+          bits |= NextByte();
+          bitsAvail += 8;
+          if (bitsAvail > 32)
+             throw new cException("ERROR: H264::cBitReader::ReadBits(): bitbuffer overflow");
+          }
+    // return n most significant bits
+    bitsAvail -= n;
+    return (bits >> bitsAvail) & (((uint32_t)1 << n) - 1);
+  }
+
+  inline uint32_t cBitReader::ue(void)
+  {
+    // read and decode an Exp-Golomb coded unsigned number
+    //
+    // bitstring             resulting number
+    //       1               0
+    //     0 1 x             1 ... 2
+    //   0 0 1 x y           3 ... 6
+    // 0 0 0 1 x y z         7 ... 14
+    // ...
+    int LeadingZeroBits = 0;
+    while (ReadBits(1) == 0)
+          LeadingZeroBits++;
+    if (LeadingZeroBits >= 32)
+       throw new cException("ERROR: H264::cBitReader::ue(): overflow");
+    return ((uint32_t)1 << LeadingZeroBits) - 1 + ReadBits(LeadingZeroBits);
+  }
+
+  inline int32_t cBitReader::se(void)
+  {
+    // read and decode an Exp-Golomb coded signed number
+    //
+    // unsigned value       resulting signed value
+    // 0                     0
+    // 1                    +1
+    // 2                    -1
+    // 3                    +2
+    // 4                    -2
+    // ...
+    uint32_t r = ue();
+    if (r > 0xFFFFFFFE)
+       throw new cException("ERROR: H264::cBitReader::se(): overflow");
+    return (1 - 2 * (r & 1)) * ((r + 1) / 2);
+  }
+
+  // --- cSequenceParameterSet -----------------------------------------------
+
+  class cSequenceParameterSet {
+  private:
+    friend class cContext;
+    bool defined;
+    uint32_t log2MaxFrameNum;
+    uint32_t log2MaxPicOrderCntLsb;
+  public:
+    cSequenceParameterSet(void);
+    bool Defined(void) { return defined; }
+    void log2_max_frame_num_minus4(uint32_t Value) { log2MaxFrameNum = Value + 4; }
+    uint32_t log2_max_frame_num_minus4(void) const { return log2MaxFrameNum - 4; }
+    uint32_t log2_max_frame_num(void) const { return log2MaxFrameNum; }
+    void log2_max_pic_order_cnt_lsb_minus4(uint32_t Value) { log2MaxPicOrderCntLsb = Value + 4; }
+    uint32_t log2_max_pic_order_cnt_lsb_minus4(void) const { return log2MaxPicOrderCntLsb - 4; }
+    uint32_t log2_max_pic_order_cnt_lsb(void) const { return log2MaxPicOrderCntLsb; }
+    uint32_t seq_parameter_set_id;
+    uint32_t pic_order_cnt_type;
+    uint32_t delta_pic_order_always_zero_flag;
+    uint32_t frame_mbs_only_flag;
+  };
+
+  cSequenceParameterSet::cSequenceParameterSet(void)
+  {
+    memset(this, 0, sizeof (*this));
+    log2_max_frame_num_minus4(0);
+    log2_max_pic_order_cnt_lsb_minus4(0);
+  }
+
+  // --- cPictureParameterSet ------------------------------------------------
+
+  class cPictureParameterSet {
+  private:
+    friend class cContext;
+    bool defined;
+  public:
+    cPictureParameterSet(void) { memset(this, 0, sizeof (*this)); }
+    bool Defined(void) { return defined; }
+    uint32_t pic_parameter_set_id;
+    uint32_t seq_parameter_set_id;
+    uint32_t pic_order_present_flag;
+  };
+
+  // --- cSliceHeader --------------------------------------------------------
+
+  class cSliceHeader {
+  private:
+    friend class cContext;
+    bool defined;
+    bool isFirstSliceOfCurrentAccessUnit;
+    uint32_t picOrderCntType;
+    uint32_t nalRefIdc;
+    uint32_t nalUnitType;
+  public:
+    cSliceHeader(void) { memset(this, 0, sizeof (*this)); }
+    bool Defined(void) const { return defined; }
+    bool IsFirstSliceOfCurrentAccessUnit(void) const { return isFirstSliceOfCurrentAccessUnit; }
+    void nal_ref_idc(uint32_t Value) { nalRefIdc = Value; }
+    uint32_t nal_ref_idc(void) const { return nalRefIdc; }
+    void nal_unit_type(uint32_t Value) { nalUnitType = Value; }
+    uint32_t nal_unit_type(void) const { return nalUnitType; }
+    uint32_t slice_type;
+    uint32_t pic_parameter_set_id;
+    uint32_t frame_num;
+    uint32_t field_pic_flag;
+    uint32_t bottom_field_flag;
+    uint32_t idr_pic_id;
+    uint32_t pic_order_cnt_lsb;
+    int32_t delta_pic_order_cnt_bottom;
+    int32_t delta_pic_order_cnt[2];
+    enum eAccessUnitType {
+      Frame = 0,
+      TopField,
+      BottomField
+      };
+    eAccessUnitType GetAccessUnitType() const { return (eAccessUnitType)(field_pic_flag + bottom_field_flag); }
+  };
+
+  // --- cContext ------------------------------------------------------------
+
+  class cContext {
+  private:
+    cSequenceParameterSet spsStore[32];
+    cPictureParameterSet ppsStore[256];
+    cSequenceParameterSet *sps; // active Sequence Parameter Set
+    cPictureParameterSet *pps; // active Picture Parameter Set
+    cSliceHeader sh;
+  public:
+    cContext(void) { sps = 0; pps = 0; }
+    void Define(cSequenceParameterSet &SPS);
+    void Define(cPictureParameterSet &PPS);
+    void Define(cSliceHeader &SH);
+    void ActivateSPS(uint32_t ID);
+    void ActivatePPS(uint32_t ID);
+    const cSequenceParameterSet *ActiveSPS(void) const { return sps; }
+    const cPictureParameterSet *ActivePPS(void) const { return pps; }
+    const cSliceHeader *CurrentSlice(void) const { return sh.Defined() ? &sh : 0; }
+  };
+
+  inline void cContext::ActivateSPS(uint32_t ID)
+  {
+    if (ID >= (sizeof (spsStore) / sizeof (*spsStore)))
+       throw new cException("ERROR: H264::cContext::ActivateSPS(): id out of range");
+    if (!spsStore[ID].Defined())
+       throw new cException("ERROR: H264::cContext::ActivateSPS(): requested SPS is undefined");
+    sps = &spsStore[ID];
+  }
+
+  inline void cContext::ActivatePPS(uint32_t ID)
+  {
+    if (ID >= (sizeof (ppsStore) / sizeof (*ppsStore)))
+       throw new cException("ERROR: H264::cContext::ActivatePPS(): id out of range");
+    if (!ppsStore[ID].Defined())
+       throw new cException("ERROR: H264::cContext::ActivatePPS(): requested PPS is undefined");
+    pps = &ppsStore[ID];
+    ActivateSPS(pps->seq_parameter_set_id);
+  }
+
+  inline void cContext::Define(cSequenceParameterSet &SPS)
+  {
+    if (SPS.seq_parameter_set_id >= (sizeof (spsStore) / sizeof (*spsStore)))
+       throw new cException("ERROR: H264::cContext::DefineSPS(): id out of range");
+    SPS.defined = true;
+    spsStore[SPS.seq_parameter_set_id] = SPS;
+  }
+
+  inline void cContext::Define(cPictureParameterSet &PPS)
+  {
+    if (PPS.pic_parameter_set_id >= (sizeof (ppsStore) / sizeof (*ppsStore)))
+       throw new cException("ERROR: H264::cContext::DefinePPS(): id out of range");
+    PPS.defined = true;
+    ppsStore[PPS.pic_parameter_set_id] = PPS;
+  }
+
+  inline void cContext::Define(cSliceHeader &SH)
+  {
+    SH.defined = true;
+    SH.picOrderCntType = ActiveSPS()->pic_order_cnt_type;
+
+    // ITU-T Rec. H.264 (03/2005): 7.4.1.2.4
+    SH.isFirstSliceOfCurrentAccessUnit = !sh.Defined()
+      || (sh.frame_num                  != SH.frame_num)
+      || (sh.pic_parameter_set_id       != SH.pic_parameter_set_id)
+      || (sh.field_pic_flag             != SH.field_pic_flag)
+      || (sh.bottom_field_flag          != SH.bottom_field_flag)
+      || (sh.nalRefIdc                  != SH.nalRefIdc
+      && (sh.nalRefIdc == 0             || SH.nalRefIdc == 0))
+      || (sh.picOrderCntType == 0       && SH.picOrderCntType == 0
+      && (sh.pic_order_cnt_lsb          != SH.pic_order_cnt_lsb
+      ||  sh.delta_pic_order_cnt_bottom != SH.delta_pic_order_cnt_bottom))
+      || (sh.picOrderCntType == 1       && SH.picOrderCntType == 1
+      && (sh.delta_pic_order_cnt[0]     != SH.delta_pic_order_cnt[0]
+      ||  sh.delta_pic_order_cnt[1]     != SH.delta_pic_order_cnt[1]))
+      || (sh.nalUnitType                != SH.nalUnitType
+      && (sh.nalUnitType == 5           || SH.nalUnitType == 5))
+      || (sh.nalUnitType == 5           && SH.nalUnitType == 5
+      &&  sh.idr_pic_id                 != SH.idr_pic_id);
+        
+    sh = SH;
+  }
+
+  // --- cSimpleBuffer -------------------------------------------------------
+
+  class cSimpleBuffer {
+  private:
+    uchar *data;
+    int size;
+    int avail;
+    int gotten;
+  public:
+    cSimpleBuffer(int Size);
+    ~cSimpleBuffer();
+    int Available(void) { return avail; }
+    int Free(void) { return size - avail; }
+    int Put(const uchar *Data, int Count);
+    uchar *Get(int &Count);
+    void Del(int Count);
+    void Clear(void);
+  };
+
+  cSimpleBuffer::cSimpleBuffer(int Size)
+  {
+    size = Size;
+    data = new uchar[size];
+    avail = 0;
+    gotten = 0;
+  }
+
+  cSimpleBuffer::~cSimpleBuffer()
+  {
+    delete[] data; // data was allocated with new[]
+  }
+
+  int cSimpleBuffer::Put(const uchar *Data, int Count)
+  {
+    if (Count < 0) {
+       if (avail + Count < 0)
+          Count = 0 - avail;
+       if (avail + Count < gotten)
+          Count = gotten - avail;
+       avail += Count;
+       return Count;
+       }
+    if (avail + Count > size)
+       Count = size - avail;
+    memcpy(data + avail, Data, Count);
+    avail += Count;
+    return Count;
+  }
+
+  uchar *cSimpleBuffer::Get(int &Count)
+  {
+    Count = gotten = avail;
+    return data;
+  }
+
+  void cSimpleBuffer::Del(int Count)
+  {
+    if (Count < 0)
+       return;
+    if (Count > gotten) {
+       esyslog("ERROR: invalid Count in H264::cSimpleBuffer::Del: %d (limited to %d)", Count, gotten);
+       Count = gotten;
+       }
+    if (Count < avail)
+       memmove(data, data + Count, avail - Count);
+    avail -= Count;
+    gotten = 0;
+  }
+
+  void cSimpleBuffer::Clear(void)
+  {
+    avail = gotten = 0;
+  }
+
+  // --- cParser -------------------------------------------------------------
+
+  class cParser {
+  private:
+    bool syncing;
+    cContext context;
+    cSimpleBuffer nalUnitDataBuffer;
+    void ParseSequenceParameterSet(uint8_t *Data, int Count);
+    void ParsePictureParameterSet(uint8_t *Data, int Count);
+    void ParseSlice(uint8_t *Data, int Count);
+  public:
+    cParser(void);
+    const cContext &Context(void) const { return context; }
+    void PutNalUnitData(const uchar *Data, int Count);
+    void Reset(void);
+    void Process();
+  };
+
+  cParser::cParser(void)
+    : nalUnitDataBuffer(1000)
+  {
+    // the above buffer size of 1000 bytes won't hold a complete NAL unit but
+    // should be sufficient for the relevant part used for parsing.
+    Reset();
+  }
+
+  void cParser::Reset(void)
+  {
+    context = cContext();
+    nalUnitDataBuffer.Clear();
+    syncing = true;
+  }
+
+  void cParser::ParseSequenceParameterSet(uint8_t *Data, int Count)
+  {
+    cSequenceParameterSet SPS;
+
+    cBitReader br(Data + 1, Count - 1);
+    uint32_t profile_idc = br.u(8);
+    /* uint32_t constraint_set0_flag = */ br.u(1);
+    /* uint32_t constraint_set1_flag = */ br.u(1);
+    /* uint32_t constraint_set2_flag = */ br.u(1);
+    /* uint32_t constraint_set3_flag = */ br.u(1);
+    /* uint32_t reserved_zero_4bits = */ br.u(4);
+    /* uint32_t level_idc = */ br.u(8);
+    SPS.seq_parameter_set_id = br.ue();
+    if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 || profile_idc == 144) {
+       uint32_t chroma_format_idc = br.ue();
+       if (chroma_format_idc == 3) {
+          /* uint32_t residual_colour_transform_flag = */ br.u(1);
+          }
+       /* uint32_t bit_depth_luma_minus8 = */ br.ue();
+       /* uint32_t bit_depth_chroma_minus8 = */ br.ue();
+       /* uint32_t qpprime_y_zero_transform_bypass_flag = */ br.u(1);
+       uint32_t seq_scaling_matrix_present_flag = br.u(1);
+       if (seq_scaling_matrix_present_flag) {
+          for (int i = 0; i < 8; i++) {
+              uint32_t seq_scaling_list_present_flag = br.u(1);
+              if (seq_scaling_list_present_flag) {
+                 int sizeOfScalingList = (i < 6) ? 16 : 64;
+                 int lastScale = 8;
+                 int nextScale = 8;
+                 for (int j = 0; j < sizeOfScalingList; j++) {
+                     if (nextScale != 0) {
+                        int32_t delta_scale = br.se();
+                        nextScale = (lastScale + delta_scale + 256) % 256;
+                        }
+                     lastScale = (nextScale == 0) ? lastScale : nextScale;
+                     }
+                 }
+              }
+          }
+       }
+    SPS.log2_max_frame_num_minus4(br.ue());
+    SPS.pic_order_cnt_type = br.ue();
+    if (SPS.pic_order_cnt_type == 0)
+       SPS.log2_max_pic_order_cnt_lsb_minus4(br.ue());
+    else if (SPS.pic_order_cnt_type == 1) {
+       SPS.delta_pic_order_always_zero_flag = br.u(1);
+       /* int32_t offset_for_non_ref_pic = */ br.se();
+       /* int32_t offset_for_top_to_bottom_field = */ br.se();
+       uint32_t num_ref_frames_in_pic_order_cnt_cycle = br.ue();
+       for (uint32_t i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++) {
+           /* int32_t offset_for_ref_frame = */ br.se();
+           }
+       }
+    /* uint32_t num_ref_frames = */ br.ue();
+    /* uint32_t gaps_in_frame_num_value_allowed_flag = */ br.u(1);
+    /* uint32_t pic_width_in_mbs_minus1 = */ br.ue();
+    /* uint32_t pic_height_in_map_units_minus1 = */ br.ue();
+    SPS.frame_mbs_only_flag = br.u(1);
+
+    context.Define(SPS);
+  }
+
+  void cParser::ParsePictureParameterSet(uint8_t *Data, int Count)
+  {
+    cPictureParameterSet PPS;
+
+    cBitReader br(Data + 1, Count - 1);
+    PPS.pic_parameter_set_id = br.ue();
+    PPS.seq_parameter_set_id = br.ue();
+    /* uint32_t entropy_coding_mode_flag = */ br.u(1);
+    PPS.pic_order_present_flag = br.u(1);
+
+    context.Define(PPS);
+  }
+
+  void cParser::ParseSlice(uint8_t *Data, int Count)
+  {
+    cSliceHeader SH;
+
+    cBitReader br(Data + 1, Count - 1);
+    SH.nal_ref_idc(Data[0] >> 5);
+    SH.nal_unit_type(Data[0] & 0x1F);
+    /* uint32_t first_mb_in_slice = */ br.ue();
+    SH.slice_type = br.ue();
+    SH.pic_parameter_set_id = br.ue();
+
+    context.ActivatePPS(SH.pic_parameter_set_id);
+    const cSequenceParameterSet *SPS = context.ActiveSPS();
+
+    SH.frame_num = br.u(SPS->log2_max_frame_num());
+    if (!SPS->frame_mbs_only_flag) {
+       SH.field_pic_flag = br.u(1);
+       if (SH.field_pic_flag)
+          SH.bottom_field_flag = br.u(1);
+       }
+    if (SH.nal_unit_type() == 5)
+       SH.idr_pic_id = br.ue();
+    if (SPS->pic_order_cnt_type == 0) {
+       SH.pic_order_cnt_lsb = br.u(SPS->log2_max_pic_order_cnt_lsb());
+       const cPictureParameterSet *PPS = context.ActivePPS();
+       if (PPS->pic_order_present_flag && !SH.field_pic_flag)
+          SH.delta_pic_order_cnt_bottom = br.se();
+       }
+    if (SPS->pic_order_cnt_type == 1 && !SPS->delta_pic_order_always_zero_flag) {
+       SH.delta_pic_order_cnt[0] = br.se();
+       const cPictureParameterSet *PPS = context.ActivePPS();
+       if (PPS->pic_order_present_flag && !SH.field_pic_flag)
+          SH.delta_pic_order_cnt[1] = br.se();
+       }
+    
+    context.Define(SH);
+  }
+
+  void cParser::PutNalUnitData(const uchar *Data, int Count)
+  {
+    int n = nalUnitDataBuffer.Put(Data, Count);
+    // typically less than a complete NAL unit is needed for parsing the
+    // relevant data, so simply ignore the overflow condition.
+    if (false && n != Count)
+       esyslog("ERROR: H264::cParser::PutNalUnitData(): NAL unit data buffer overflow");
+  }
+
+  void cParser::Process()
+  {
+    // nalUnitDataBuffer contains the head of the current NAL unit -- let's parse it 
+    int Count = 0;
+    uchar *Data = nalUnitDataBuffer.Get(Count);
+    if (Data && Count >= 4) {
+       if (Data[0] == 0x00 && Data[1] == 0x00 && Data[2] == 0x01) {
+          int nal_unit_type = Data[3] & 0x1F;
+          try {
+              switch (nal_unit_type) {
+                case 1: // coded slice of a non-IDR picture
+                case 2: // coded slice data partition A
+                case 5: // coded slice of an IDR picture
+                     ParseSlice(Data + 3, Count - 3);
+                     break;
+                case 7: // sequence parameter set
+                     syncing = false; // from now on, we should get reliable results
+                     ParseSequenceParameterSet(Data + 3, Count - 3);
+                     break;
+                case 8: // picture parameter set
+                     ParsePictureParameterSet(Data + 3, Count - 3);
+                     break;
+                }
+              }
+          catch (cException *e) {
+              if (!syncing) // suppress typical error messages while syncing
+                 esyslog(e->Message());
+              delete e;
+              }
+          }
+       else if (!syncing)
+          esyslog("ERROR: H264::cParser::Process(): NAL unit data buffer content is invalid");
+       }
+    else if (!syncing)
+       esyslog("ERROR: H264::cParser::Process(): NAL unit data buffer content is too short");
+    // reset the buffer for the next NAL unit
+    nalUnitDataBuffer.Clear();
+  }
+}
+
 // --- cVideoRepacker --------------------------------------------------------
 
 class cVideoRepacker : public cCommonRepacker {
@@ -248,6 +798,9 @@ private:
     scanPicture
     };
   int state;
+  H264::cParser *h264Parser;
+  void PushOutCurrentFrameAndStartNewPacket(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel);
+  void HandleNalUnit(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel, const uchar *&NalPayload);
   void HandleStartCode(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel);
   inline bool ScanDataForStartCodeSlow(const uchar *const Data);
   inline bool ScanDataForStartCodeFast(const uchar *&Data, const uchar *Limit);
@@ -256,30 +809,103 @@ private:
   inline bool ScanForEndOfPictureSlow(const uchar *&Data);
   inline bool ScanForEndOfPictureFast(const uchar *&Data, const uchar *Limit);
   inline bool ScanForEndOfPicture(const uchar *&Data, const uchar *Limit);
+  void CollectNalUnitData(const uchar *Data, int Count);
 public:
-  cVideoRepacker(void);
+  cVideoRepacker(bool H264);
+  ~cVideoRepacker();
   virtual void Reset(void);
   virtual void Repack(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count);
   virtual int BreakAt(const uchar *Data, int Count);
   };
 
-cVideoRepacker::cVideoRepacker(void)
+cVideoRepacker::cVideoRepacker(bool H264)
 {
+  h264Parser = (H264 ? new H264::cParser() : 0);
   Reset();
 }
 
+cVideoRepacker::~cVideoRepacker()
+{
+  delete h264Parser;
+}
+
 void cVideoRepacker::Reset(void)
 {
   cCommonRepacker::Reset();
+  if (h264Parser)
+     h264Parser->Reset();
   scanner = 0xFFFFFFFF;
   state = syncing;
 }
 
-void cVideoRepacker::HandleStartCode(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel)
+void cVideoRepacker::CollectNalUnitData(const uchar *Data, int Count)
 {
-  // synchronisation is detected some bytes after frame start.
-  const int SkippedBytesLimit = 4;
+  if (h264Parser)
+     h264Parser->PutNalUnitData(Data, Count);
+}
+
+void cVideoRepacker::HandleNalUnit(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel, const uchar *&NalPayload)
+{
+  // valid NAL units start with a zero bit
+  if (*Data & 0x80) {
+     LOG("cVideoRepacker: found invalid NAL unit: stream seems to be scrambled or not demultiplexed");
+     return;
+     }
+
+  // collect NAL unit's remaining data and process it 
+  CollectNalUnitData(NalPayload, Data - 3 - NalPayload);
+  h264Parser->Process();
+
+  // collect 0x00 0x00 0x01 for current NAL unit
+  static const uchar InitPayload[3] = { 0x00, 0x00, 0x01 };
+  CollectNalUnitData(InitPayload, sizeof (InitPayload));
+  NalPayload = Data;
+  
+  // which kind of NAL unit have we got?
+  int nal_unit_type = *Data & 0x1F;
+  switch (nal_unit_type) {
+    case 1: // coded slice of a non-IDR picture
+    case 2: // coded slice data partition A
+    case 3: // coded slice data partition B
+    case 4: // coded slice data partition C
+    case 5: // coded slice of an IDR picture
+    case 6: // supplemental enhancement information (SEI)
+    case 7: // sequence parameter set
+    case 8: // picture parameter set
+    case 10: // end of sequence
+    case 11: // end of stream
+    case 12: // filler data
+    case 13: // sequence parameter set extension
+    case 19: // coded slice of an auxiliary coded picture without partitioning
+         break;
+    case 14 ... 18: // reserved
+    case 20 ... 23: // reserved
+         LOG("cVideoRepacker: found reserved NAL unit type: stream seems to be scrambled");
+         break;
+    case 0: // unspecified
+    case 24 ... 31: // unspecified
+         LOG("cVideoRepacker: found unspecified NAL unit type: stream seems to be scrambled");
+         break;
+    case 9: { // access unit delimiter
+         // mangle the "start code" to pass the information that the next access unit will be
+         // a bottom field to ScanVideoPacket() where this modification will be reverted.
+         const H264::cSliceHeader *SH = h264Parser->Context().CurrentSlice();
+         if (SH && SH->GetAccessUnitType() == H264::cSliceHeader::TopField)
+            *(uchar *)Data |= 0x80;
+         // the above NAL unit type indicates that the current picture is done. So let's
+         // push out the current frame to start a new packet for the next picture.
+         PushOutCurrentFrameAndStartNewPacket(Data, ResultBuffer, Payload, StreamID, MpegLevel);
+         if (state == findPicture) {
+            // go on with scanning the picture data
+            state++;
+            }
+         }
+         break;
+    }
+}
 
+void cVideoRepacker::HandleStartCode(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel)
+{
   // which kind of start code have we got?
   switch (*Data) {
     case 0xB9 ... 0xFF: // system start codes
@@ -298,65 +924,9 @@ void cVideoRepacker::HandleStartCode(con
     case 0xB3: // sequence header code
     case 0xB8: // group start code
     case 0x00: // picture start code
-         if (state == scanPicture) {
-            // the above start codes indicate that the current picture is done. So
-            // push out the packet to start a new packet for the next picuture. If
-            // the byte count get's negative then the current buffer ends in a
-            // partitial start code that must be stripped off, as it shall be put
-            // in the next packet.
-            PushOutPacket(ResultBuffer, Payload, Data - 3 - Payload);
-            // go on with syncing to the next picture
-            state = syncing;
-            }
-         if (state == syncing) {
-            if (initiallySyncing) // omit report for the typical initial case
-               initiallySyncing = false;
-            else if (skippedBytes > SkippedBytesLimit) // report that syncing dropped some bytes
-               LOG("cVideoRepacker: skipped %d bytes to sync on next picture", skippedBytes - SkippedBytesLimit);
-            skippedBytes = 0;
-            // if there is a PES header available, then use it ...
-            if (pesHeaderBackupLen > 0) {
-               // ISO 13818-1 says:
-               // In the case of video, if a PTS is present in a PES packet header
-               // it shall refer to the access unit containing the first picture start
-               // code that commences in this PES packet. A picture start code commences
-               // in PES packet if the first byte of the picture start code is present
-               // in the PES packet.
-               memcpy(pesHeader, pesHeaderBackup, pesHeaderBackupLen);
-               pesHeaderLen = pesHeaderBackupLen;
-               pesHeaderBackupLen = 0;
-               }
-            else {
-               // ... otherwise create a continuation PES header
-               pesHeaderLen = 0;
-               pesHeader[pesHeaderLen++] = 0x00;
-               pesHeader[pesHeaderLen++] = 0x00;
-               pesHeader[pesHeaderLen++] = 0x01;
-               pesHeader[pesHeaderLen++] = StreamID; // video stream ID
-               pesHeader[pesHeaderLen++] = 0x00; // length still unknown
-               pesHeader[pesHeaderLen++] = 0x00; // length still unknown
-
-               if (MpegLevel == phMPEG2) {
-                  pesHeader[pesHeaderLen++] = 0x80;
-                  pesHeader[pesHeaderLen++] = 0x00;
-                  pesHeader[pesHeaderLen++] = 0x00;
-                  }
-               else
-                  pesHeader[pesHeaderLen++] = 0x0F;
-               }
-            // append the first three bytes of the start code
-            pesHeader[pesHeaderLen++] = 0x00;
-            pesHeader[pesHeaderLen++] = 0x00;
-            pesHeader[pesHeaderLen++] = 0x01;
-            // the next packet's payload will begin with the fourth byte of
-            // the start code (= the actual code)
-            Payload = Data;
-            // as there is no length information available, assume the
-            // maximum we can hold in one PES packet
-            packetTodo = maxPacketSize - pesHeaderLen;
-            // go on with finding the picture data
-            state++;
-            }
+         // the above start codes indicate that the current picture is done. So let's
+         // push out the current frame to start a new packet for the next picture.
+         PushOutCurrentFrameAndStartNewPacket(Data, ResultBuffer, Payload, StreamID, MpegLevel);
          break;
     case 0x01 ... 0xAF: // slice start codes
          if (state == findPicture) {
@@ -367,6 +937,73 @@ void cVideoRepacker::HandleStartCode(con
     }
 }
 
+void cVideoRepacker::PushOutCurrentFrameAndStartNewPacket(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel)
+{
+  // synchronisation is detected some bytes after frame start.
+  const int SkippedBytesLimit = 4;
+
+  if (state == scanPicture) {
+     // picture data has been found so let's push out the current frame.
+     // If the byte count gets negative then the current buffer ends in a
+     // partial start code that must be stripped off, as it shall be put
+     // in the next packet.
+     PushOutPacket(ResultBuffer, Payload, Data - 3 - Payload);
+     // go on with syncing to the next picture
+     state = syncing;
+     }
+  // when already synced to a picture, just go on collecting data 
+  if (state != syncing)
+     return;
+  // we're synced to a picture so prepare a new packet
+  if (initiallySyncing) // omit report for the typical initial case
+     initiallySyncing = false;
+  else if (skippedBytes > SkippedBytesLimit) // report that syncing dropped some bytes
+     LOG("cVideoRepacker: skipped %d bytes to sync on next picture", skippedBytes - SkippedBytesLimit);
+  skippedBytes = 0;
+  // if there is a PES header available, then use it ...
+  if (pesHeaderBackupLen > 0) {
+     // ISO 13818-1 says:
+     // In the case of video, if a PTS is present in a PES packet header
+     // it shall refer to the access unit containing the first picture start
+     // code that commences in this PES packet. A picture start code commences
+     // in PES packet if the first byte of the picture start code is present
+     // in the PES packet.
+     memcpy(pesHeader, pesHeaderBackup, pesHeaderBackupLen);
+     pesHeaderLen = pesHeaderBackupLen;
+     pesHeaderBackupLen = 0;
+     }
+  else {
+     // ... otherwise create a continuation PES header
+     pesHeaderLen = 0;
+     pesHeader[pesHeaderLen++] = 0x00;
+     pesHeader[pesHeaderLen++] = 0x00;
+     pesHeader[pesHeaderLen++] = 0x01;
+     pesHeader[pesHeaderLen++] = StreamID; // video stream ID
+     pesHeader[pesHeaderLen++] = 0x00; // length still unknown
+     pesHeader[pesHeaderLen++] = 0x00; // length still unknown
+
+     if (MpegLevel == phMPEG2) {
+        pesHeader[pesHeaderLen++] = 0x80;
+        pesHeader[pesHeaderLen++] = 0x00;
+        pesHeader[pesHeaderLen++] = 0x00;
+        }
+     else
+        pesHeader[pesHeaderLen++] = 0x0F;
+     }
+  // append the first three bytes of the start code
+  pesHeader[pesHeaderLen++] = 0x00;
+  pesHeader[pesHeaderLen++] = 0x00;
+  pesHeader[pesHeaderLen++] = 0x01;
+  // the next packet's payload will begin with the fourth byte of
+  // the start code (= the actual code)
+  Payload = Data;
+  // as there is no length information available, assume the
+  // maximum we can hold in one PES packet
+  packetTodo = maxPacketSize - pesHeaderLen;
+  // go on with finding the picture data
+  state++;
+}
+
 bool cVideoRepacker::ScanDataForStartCodeSlow(const uchar *const Data)
 {
   scanner <<= 8;
@@ -458,14 +1095,19 @@ void cVideoRepacker::Repack(cRingBufferL
   const uchar *data = Data + done;
   // remember start of the data
   const uchar *payload = data;
+  const uchar *NalPayload = payload;
 
   while (todo > 0) {
         // collect number of skipped bytes while syncing
         if (state <= syncing)
            skippedBytes++;
         // did we reach a start code?
-        if (ScanDataForStartCode(data, done, todo))
-           HandleStartCode(data, ResultBuffer, payload, Data[3], mpegLevel);
+        if (ScanDataForStartCode(data, done, todo)) {
+           if (h264Parser)
+              HandleNalUnit(data, ResultBuffer, payload, Data[3], mpegLevel, NalPayload);
+           else
+              HandleStartCode(data, ResultBuffer, payload, Data[3], mpegLevel);
+           }
         // move on
         data++;
         done++;
@@ -567,6 +1209,7 @@ void cVideoRepacker::Repack(cRingBufferL
         memcpy(fragmentData + fragmentLen, payload, bite);
         fragmentLen += bite;
         }
+     CollectNalUnitData(NalPayload, data - NalPayload);
      }
   // report that syncing dropped some bytes
   if (skippedBytes > SkippedBytesLimit) {
@@ -581,13 +1224,22 @@ bool cVideoRepacker::ScanForEndOfPicture
   localScanner <<= 8;
   localScanner |= *Data++;
   // check start codes which follow picture data
-  switch (localScanner) {
-    case 0x00000100: // picture start code
-    case 0x000001B8: // group start code
-    case 0x000001B3: // sequence header code
-    case 0x000001B7: // sequence end code
-         return true;
-    }
+  if (h264Parser) {
+     int nal_unit_type = localScanner & 0x1F;
+     switch (nal_unit_type) {
+       case 9: // access unit delimiter
+            return true;
+       }
+     }
+  else {
+     switch (localScanner) {
+       case 0x00000100: // picture start code
+       case 0x000001B8: // group start code
+       case 0x000001B3: // sequence header code
+       case 0x000001B7: // sequence end code
+            return true;
+       }
+     }
   return false;
 }
 
@@ -601,15 +1253,27 @@ bool cVideoRepacker::ScanForEndOfPicture
         else {
            localScanner = 0x00000100 | *++Data;
            // check start codes which follow picture data
-           switch (localScanner) {
-             case 0x00000100: // picture start code
-             case 0x000001B8: // group start code
-             case 0x000001B3: // sequence header code
-             case 0x000001B7: // sequence end code
-                  Data++;
-                  return true;
-             default:
-                  Data += 3;
+           if (h264Parser) {
+              int nal_unit_type = localScanner & 0x1F;
+              switch (nal_unit_type) {
+                case 9: // access unit delimiter
+                     Data++;
+                     return true;
+                default:
+                     Data += 3;
+                }
+              }
+           else {
+              switch (localScanner) {
+                case 0x00000100: // picture start code
+                case 0x000001B8: // group start code
+                case 0x000001B3: // sequence header code
+                case 0x000001B7: // sequence end code
+                     Data++;
+                     return true;
+                default:
+                     Data += 3;
+                }
              }
            }
         }
@@ -1855,6 +2519,8 @@ void cTS2PES::ts_to_pes(const uint8_t *B
 
 cRemux::cRemux(int VPid, const int *APids, const int *DPids, const int *SPids, bool ExitOnFailure)
 {
+  h264 = VPID_IS_H264(VPid);
+  VPid = VPID_FROM_ANY(VPid);
   exitOnFailure = ExitOnFailure;
   isRadio = VPid == 0 || VPid == 1 || VPid == 0x1FFF;
   numUPTerrors = 0;
@@ -1867,7 +2533,7 @@ cRemux::cRemux(int VPid, const int *APid
   if (VPid)
 #define TEST_cVideoRepacker
 #ifdef TEST_cVideoRepacker
-     ts2pes[numTracks++] = new cTS2PES(VPid, resultBuffer, IPACKS, 0xE0, 0x00, new cVideoRepacker);
+     ts2pes[numTracks++] = new cTS2PES(VPid, resultBuffer, IPACKS, 0xE0, 0x00, new cVideoRepacker(h264));
 #else
      ts2pes[numTracks++] = new cTS2PES(VPid, resultBuffer, IPACKS, 0xE0);
 #endif
@@ -1919,6 +2585,23 @@ int cRemux::GetPacketLength(const uchar 
   return -1;
 }
 
+bool cRemux::IsFrameH264(const uchar *Data, int Length)
+{
+  int PesPayloadOffset;
+  const uchar *limit = Data + Length;
+  if (AnalyzePesHeader(Data, Length, PesPayloadOffset) <= phInvalid)
+     return false; // neither MPEG1 nor MPEG2
+
+  Data += PesPayloadOffset + 3; // move to video payload and skip 00 00 01
+  if (Data < limit) {
+     // cVideoRepacker ensures that in case of H264 we will see an access unit delimiter here
+     if (0x01 == Data[-1] && 9 == Data[0] && 0x00 == Data[-2] && 0x00 == Data[-3])
+        return true;
+     }
+
+  return false;
+}
+
 int cRemux::ScanVideoPacket(const uchar *Data, int Count, int Offset, uchar &PictureType)
 {
   // Scans the video packet starting at Offset and returns its length.
@@ -1937,23 +2620,67 @@ int cRemux::ScanVideoPacket(const uchar 
            if (p[-2] || p[-1] || p[0] != 0x01)
               pLimit = 0; // skip scanning: packet doesn't start with 0x000001
            else {
-              switch (p[1]) {
-                case SC_SEQUENCE:
-                case SC_GROUP:
-                case SC_PICTURE:
-                     break;
-                default: // skip scanning: packet doesn't start a new sequence, group or picture
-                     pLimit = 0;
-                }
+              if (h264) {
+                 int nal_unit_type = p[1] & 0x1F;
+                 switch (nal_unit_type) {
+                   case 9: // access unit delimiter
+                        // when the MSB in p[1] is set (which violates H.264) then this is a hint
+                        // from cVideoRepacker::HandleNalUnit() that this bottom field shall not
+                        // be reported as picture.
+                        if (p[1] & 0x80)
+                           ((uchar *)p)[1] &= ~0x80; // revert the hint and fall through
+                        else
+                           break;
+                   default: // skip scanning: packet doesn't start a new picture
+                        pLimit = 0;
+                   }
+                 }
+              else {
+                 switch (p[1]) {
+                   case SC_SEQUENCE:
+                   case SC_GROUP:
+                   case SC_PICTURE:
+                        break;
+                   default: // skip scanning: packet doesn't start a new sequence, group or picture
+                        pLimit = 0;
+                   }
+                 }
               }
            }
 #endif
         while (p < pLimit && (p = (const uchar *)memchr(p, 0x01, pLimit - p))) {
               if (!p[-2] && !p[-1]) { // found 0x000001
-                 switch (p[1]) {
-                   case SC_PICTURE: PictureType = (p[3] >> 3) & 0x07;
-                                    return Length;
-                   }
+                 if (h264) {
+                    int nal_unit_type = p[1] & 0x1F;
+                    switch (nal_unit_type) {
+                      case 9: { // access unit delimiter
+                              int primary_pic_type = p[2] >> 5;
+                              switch (primary_pic_type) {
+                                case 0: // I
+                                case 3: // SI
+                                case 5: // I, SI
+                                     PictureType = I_FRAME;
+                                     break;
+                                case 1: // I, P
+                                case 4: // SI, SP
+                                case 6: // I, SI, P, SP
+                                     PictureType = P_FRAME;
+                                     break;
+                                case 2: // I, P, B
+                                case 7: // I, SI, P, SP, B
+                                     PictureType = B_FRAME;
+                                     break;
+                                }
+                              return Length;
+                              }
+                      }
+                    }
+                 else {
+                    switch (p[1]) {
+                      case SC_PICTURE: PictureType = (p[3] >> 3) & 0x07;
+                                       return Length;
+                      }
+                    }
                  p += 4; // continue scanning after 0x01ssxxyy
                  }
               else
diff -Nurp ../vdr-1.5.2-orig/remux.h ./remux.h
--- ../vdr-1.5.2-orig/remux.h	2006-03-25 13:27:30.000000000 +0100
+++ ./remux.h	2007-05-06 21:00:53.000000000 +0200
@@ -38,6 +38,7 @@ class cRemux {
 private:
   bool exitOnFailure;
   bool isRadio;
+  bool h264;
   int numUPTerrors;
   bool synced;
   int skipped;
@@ -46,6 +47,7 @@ private:
   cRingBufferLinear *resultBuffer;
   int resultSkipped;
   int GetPid(const uchar *Data);
+  int ScanVideoPacket(const uchar *Data, int Count, int Offset, uchar &PictureType);
 public:
   cRemux(int VPid, const int *APids, const int *DPids, const int *SPids, bool ExitOnFailure = false);
        ///< Creates a new remuxer for the given PIDs. VPid is the video PID, while
@@ -78,7 +80,7 @@ public:
        ///< settings as they are.
   static void SetBrokenLink(uchar *Data, int Length);
   static int GetPacketLength(const uchar *Data, int Count, int Offset);
-  static int ScanVideoPacket(const uchar *Data, int Count, int Offset, uchar &PictureType);
+  static bool IsFrameH264(const uchar *Data, int Length);
   };
 
 #endif // __REMUX_H
diff -Nurp ../vdr-1.5.2-orig/transfer.c ./transfer.c
--- ../vdr-1.5.2-orig/transfer.c	2007-01-05 11:45:28.000000000 +0100
+++ ./transfer.c	2007-05-06 21:00:53.000000000 +0200
@@ -15,7 +15,7 @@
 // --- cTransfer -------------------------------------------------------------
 
 cTransfer::cTransfer(tChannelID ChannelID, int VPid, const int *APids, const int *DPids, const int *SPids)
-:cReceiver(ChannelID, -1, VPid, APids, Setup.UseDolbyDigital ? DPids : NULL, SPids)
+:cReceiver(ChannelID, -1, VPID_FROM_ANY(VPid), APids, Setup.UseDolbyDigital ? DPids : NULL, SPids)
 ,cThread("transfer")
 {
   ringBuffer = new cRingBufferLinear(TRANSFERBUFSIZE, TS_SIZE * 2, true, "Transfer");
