Re: [ANNOUNCE] DVB-S2 + H.264 support for VDR-1.5.10

Hi,

Jan Wagner schrieb:

>> attached you'll find an updated patch for VDR-1.5.10. It replaces the
>> earlier patch for VDR-1.5.9.
> 
> could you separate the patch into H.264 and DVB-S2 parts again? I use
> DVB-C with it here and do not want to upgrade to drivers with DVB-S2
> support.

I've removed the DVB-S2 part from the original patch. The result is
attached. Please give it a try.

Bye.
-- 
Dipl.-Inform. (FH) Reinhard Nissl
mailto:rnissl@xxxxxx
diff -Nup ../vdr-1.5.10-orig/Makefile ./Makefile
--- ../vdr-1.5.10-orig/Makefile	2007-10-13 11:26:40.000000000 +0200
+++ ./Makefile	2007-10-14 17:22:27.000000000 +0200
@@ -41,7 +42,7 @@ OBJS = audio.o channels.o ci.o config.o 
        lirc.o menu.o menuitems.o nit.o osdbase.o osd.o pat.o player.o plugin.o rcu.o\
        receiver.o recorder.o recording.o remote.o remux.o ringbuffer.o sdt.o sections.o shutdown.o\
        skinclassic.o skins.o skinsttng.o sources.o spu.o status.o svdrp.o themes.o thread.o\
-       timers.o tools.o transfer.o vdr.o videodir.o
+       timers.o tools.o transfer.o vdr.o videodir.o h264parser.o
 
 ifndef NO_KBD
 DEFINES += -DREMOTE_KBD
diff -Nup ../vdr-1.5.10-orig/channels.h ./channels.h
--- ../vdr-1.5.10-orig/channels.h	2007-09-02 12:23:11.000000000 +0200
+++ ./channels.h	2007-10-14 17:22:27.000000000 +0200
@@ -47,6 +47,16 @@
 #define CA_ENCRYPTED_MIN 0x0100
 #define CA_ENCRYPTED_MAX 0xFFFF
 
+// VPID can be in the range 0...8191. Offsets of 10000 are used to indicate special video codings.
+#define VPID_OFFSET_BASE         10000
+#define VPID_FROM_ANY(pid)       ((pid) % VPID_OFFSET_BASE) // returns the plain VPID
+#define VPID_TO_XXX(pid, offset) ((pid) + (offset))
+#define VPID_IS_XXX(pid, offset) (((pid) - VPID_FROM_ANY(pid)) == (offset))
+// 1. special video coding: H.264
+#define VPID_OFFSET_H264         (1 * VPID_OFFSET_BASE)
+#define VPID_TO_H264(pid)        VPID_TO_XXX(pid, VPID_OFFSET_H264)
+#define VPID_IS_H264(pid)        VPID_IS_XXX(pid, VPID_OFFSET_H264)
+
 struct tChannelParameterMap {
   int userValue;
   int driverValue;
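
A stand-alone sketch (not part of the patch) of how the VPID offset encoding above behaves; the macros are copied from the channels.h hunk and the PID value 512 is just an assumed example:

#include <cassert>

#define VPID_OFFSET_BASE         10000
#define VPID_FROM_ANY(pid)       ((pid) % VPID_OFFSET_BASE) // returns the plain VPID
#define VPID_TO_XXX(pid, offset) ((pid) + (offset))
#define VPID_IS_XXX(pid, offset) (((pid) - VPID_FROM_ANY(pid)) == (offset))
#define VPID_OFFSET_H264         (1 * VPID_OFFSET_BASE)
#define VPID_TO_H264(pid)        VPID_TO_XXX(pid, VPID_OFFSET_H264)
#define VPID_IS_H264(pid)        VPID_IS_XXX(pid, VPID_OFFSET_H264)

int main(void)
{
  int storedVpid = VPID_TO_H264(512);       // 10512 -- the value kept in the channel data
  assert(VPID_FROM_ANY(storedVpid) == 512); // the plain PID handed to the driver/CAM
  assert(VPID_IS_H264(storedVpid));         // codec check used elsewhere in the patch
  assert(!VPID_IS_H264(512));               // a plain VPID still means MPEG-1/2 video
  return 0;
}
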
diff -Nup ../vdr-1.5.10-orig/ci.c ./ci.c
--- ../vdr-1.5.10-orig/ci.c	2007-04-30 15:02:49.000000000 +0200
+++ ./ci.c	2007-08-31 21:08:30.000000000 +0200
@@ -1880,7 +1880,7 @@ void cCamSlot::AddChannel(const cChannel
   source = Channel->Source();
   transponder = Channel->Transponder();
   if (Channel->Ca() >= CA_ENCRYPTED_MIN) {
-     AddPid(Channel->Sid(), Channel->Vpid(), STREAM_TYPE_VIDEO);
+     AddPid(Channel->Sid(), VPID_FROM_ANY(Channel->Vpid()), STREAM_TYPE_VIDEO);
      for (const int *Apid = Channel->Apids(); *Apid; Apid++)
          AddPid(Channel->Sid(), *Apid, STREAM_TYPE_AUDIO);
      for (const int *Dpid = Channel->Dpids(); *Dpid; Dpid++)
@@ -1901,7 +1901,7 @@ bool cCamSlot::CanDecrypt(const cChannel
   if (cas && cas->RepliesToQuery()) {
      cCiCaPmt CaPmt(CPCI_QUERY, Channel->Source(), Channel->Transponder(), Channel->Sid(), GetCaSystemIds());
      CaPmt.SetListManagement(CPLM_ADD); // WORKAROUND: CPLM_ONLY doesn't work with Alphacrypt 3.09 (deletes existing CA_PMTs)
-     CaPmt.AddPid(Channel->Vpid(), STREAM_TYPE_VIDEO);
+     CaPmt.AddPid(VPID_FROM_ANY(Channel->Vpid()), STREAM_TYPE_VIDEO);
      for (const int *Apid = Channel->Apids(); *Apid; Apid++)
          CaPmt.AddPid(*Apid, STREAM_TYPE_AUDIO);
      for (const int *Dpid = Channel->Dpids(); *Dpid; Dpid++)
diff -Nup ../vdr-1.5.10-orig/device.c ./device.c
--- ../vdr-1.5.10-orig/device.c	2007-10-14 15:09:19.000000000 +0200
+++ ./device.c	2007-10-14 17:25:56.000000000 +0200
@@ -821,7 +821,7 @@ eSetChannelResult cDevice::SetChannel(co
            }
         for (int i = 0; i < MAXSPIDS; i++)
             SetAvailableTrack(ttSubtitle, i, Channel->Spid(i), Channel->Slang(i));
-        if (!NeedsTransferMode)
+        if (!NeedsTransferMode || GetCurrentAudioTrack() == ttNone)
            EnsureAudioTrack(true);
         EnsureSubtitleTrack();
         }
diff -Nup ../vdr-1.5.10-orig/dvbdevice.c ./dvbdevice.c
--- ../vdr-1.5.10-orig/dvbdevice.c	2007-10-14 14:56:03.000000000 +0200
+++ ./dvbdevice.c	2007-10-14 17:22:27.000000000 +0200
@@ -763,7 +811,7 @@ bool cDvbDevice::ProvidesChannel(const c
      result = hasPriority;
      if (Priority >= 0 && Receiving(true)) {
         if (dvbTuner->IsTunedTo(Channel)) {
-           if (Channel->Vpid() && !HasPid(Channel->Vpid()) || Channel->Apid(0) && !HasPid(Channel->Apid(0))) {
+           if (Channel->Vpid() && !HasPid(VPID_FROM_ANY(Channel->Vpid())) || Channel->Apid(0) && !HasPid(Channel->Apid(0))) {
 #ifdef DO_MULTIPLE_RECORDINGS
               if (CamSlot() && Channel->Ca() >= CA_ENCRYPTED_MIN) {
                  if (CamSlot()->CanDecrypt(Channel))
@@ -799,7 +847,7 @@ bool cDvbDevice::IsTunedToTransponder(co
 bool cDvbDevice::SetChannelDevice(const cChannel *Channel, bool LiveView)
 {
   int apid = Channel->Apid(0);
-  int vpid = Channel->Vpid();
+  int vpid = VPID_FROM_ANY(Channel->Vpid());
   int dpid = Channel->Dpid(0);
 
   bool DoTune = !dvbTuner->IsTunedTo(Channel);
@@ -860,7 +908,7 @@ bool cDvbDevice::SetChannelDevice(const 
      CHECK(ioctl(fd_audio, AUDIO_SET_AV_SYNC, true));
      }
   else if (StartTransferMode)
-     cControl::Launch(new cTransferControl(this, Channel->GetChannelID(), vpid, Channel->Apids(), Channel->Dpids(), Channel->Spids()));
+     cControl::Launch(new cTransferControl(this, Channel->GetChannelID(), Channel->Vpid(), Channel->Apids(), Channel->Dpids(), Channel->Spids()));
 
   return true;
 }
diff -Nup ../vdr-1.5.10-orig/dvbplayer.c ./dvbplayer.c
--- ../vdr-1.5.10-orig/dvbplayer.c	2007-10-13 14:20:58.000000000 +0200
+++ ./dvbplayer.c	2007-10-14 17:22:27.000000000 +0200
@@ -182,8 +182,8 @@ bool cNonBlockingFileReader::WaitForData
 
 #define PLAYERBUFSIZE  MEGABYTE(1)
 
-// The number of frames to back up when resuming an interrupted replay session:
-#define RESUMEBACKUP (10 * FRAMESPERSEC)
+// The number of seconds to back up when resuming an interrupted replay session:
+#define RESUMEBACKUP 10
 
 class cDvbPlayer : public cPlayer, cThread {
 private:
@@ -196,6 +196,7 @@ private:
   cFileName *fileName;
   cIndexFile *index;
   cUnbufferedFile *replayFile;
+  int framesPerSec;
   bool eof;
   bool firstPacket;
   ePlayModes playMode;
@@ -225,6 +226,7 @@ public:
   void Goto(int Position, bool Still = false);
   virtual bool GetIndex(int &Current, int &Total, bool SnapToIFrame = false);
   virtual bool GetReplayMode(bool &Play, bool &Forward, int &Speed);
+  int GetFramesPerSec(void) { return framesPerSec; }
   };
 
 #define MAX_VIDEO_SLOWMOTION 63 // max. arg to pass to VIDEO_SLOWMOTION // TODO is this value correct?
@@ -253,6 +255,7 @@ cDvbPlayer::cDvbPlayer(const char *FileN
   replayFile = fileName->Open();
   if (!replayFile)
      return;
+  framesPerSec = replayFile->GetFramesPerSec();
   ringBuffer = new cRingBufferFrame(PLAYERBUFSIZE);
   // Create the index file:
   index = new cIndexFile(FileName, false);
@@ -341,7 +344,7 @@ bool cDvbPlayer::Save(void)
   if (index) {
      int Index = writeIndex;
      if (Index >= 0) {
-        Index -= RESUMEBACKUP;
+        Index -= RESUMEBACKUP * GetFramesPerSec();
         if (Index > 0)
            Index = index->GetNextIFrame(Index, false);
         else
@@ -371,7 +374,7 @@ void cDvbPlayer::Action(void)
 
   readIndex = Resume();
   if (readIndex >= 0)
-     isyslog("resuming replay at index %d (%s)", readIndex, *IndexToHMSF(readIndex, true));
+     isyslog("resuming replay at index %d (%s)", readIndex, *IndexToHMSF(readIndex, true, GetFramesPerSec()));
 
   nonBlockingFileReader = new cNonBlockingFileReader;
   int Length = 0;
@@ -673,7 +676,7 @@ void cDvbPlayer::SkipSeconds(int Seconds
      Empty();
      int Index = writeIndex;
      if (Index >= 0) {
-        Index = max(Index + Seconds * FRAMESPERSEC, 0);
+        Index = max(Index + Seconds * GetFramesPerSec(), 0);
         if (Index > 0)
            Index = index->GetNextIFrame(Index, false, NULL, NULL, NULL, true);
         if (Index >= 0)
@@ -720,7 +723,8 @@ void cDvbPlayer::Goto(int Index, bool St
               b[r++] = 0x00;
               b[r++] = 0x00;
               b[r++] = 0x01;
-              b[r++] = 0xB7;
+              b[r] = (cRemux::IsFrameH264(b, r) ? 10 : 0xB7);
+              r++;
               }
            DeviceStillPicture(b, r);
            }
@@ -840,3 +844,10 @@ void cDvbPlayerControl::Goto(int Positio
   if (player)
      player->Goto(Position, Still);
 }
+
+int cDvbPlayerControl::GetFramesPerSec()
+{
+  if (player)
+     return player->GetFramesPerSec();
+  return FRAMESPERSEC;
+}
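
With RESUMEBACKUP now given in seconds, the number of frames actually skipped depends on the recording's frame rate. A small stand-alone sketch of that arithmetic (the frame rates 25 and 50 are assumed example values, not taken from the patch):

#include <cstdio>

#define RESUMEBACKUP 10 // seconds, as defined in the dvbplayer.c hunk above

int main(void)
{
  const int frameRates[] = { 25, 50 };
  for (int fps : frameRates)
      printf("%d fps: resume backs up %d frames, SkipSeconds(30) moves by %d frames\n",
             fps, RESUMEBACKUP * fps, 30 * fps);
  return 0;
}
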
diff -Nup ../vdr-1.5.10-orig/dvbplayer.h ./dvbplayer.h
--- ../vdr-1.5.10-orig/dvbplayer.h	2002-06-23 12:13:51.000000000 +0200
+++ ./dvbplayer.h	2007-08-28 22:25:08.000000000 +0200
@@ -54,6 +54,8 @@ public:
   void Goto(int Index, bool Still = false);
        // Positions to the given index and displays that frame as a still picture
        // if Still is true.
+  int GetFramesPerSec();
+       // Returns the number of frames per second for the current recording.
   };
 
 #endif //__DVBPLAYER_H
diff -Nup ../vdr-1.5.10-orig/h264parser.c ./h264parser.c
--- ../vdr-1.5.10-orig/h264parser.c	1970-01-01 01:00:00.000000000 +0100
+++ ./h264parser.c	2007-08-28 23:28:50.000000000 +0200
@@ -0,0 +1,461 @@
+/*
+ * h264parser.c: a minimalistic H.264 video stream parser
+ *
+ * See the main source file 'vdr.c' for copyright information and
+ * how to reach the author.
+ *
+ * The code was originally written by Reinhard Nissl <rnissl@xxxxxx>,
+ * and adapted to the VDR coding style by Klaus.Schmidinger@xxxxxxxxxxx
+ */
+
+#include "tools.h"
+#include "h264parser.h"
+
+namespace H264
+{
+  // --- cContext ------------------------------------------------------------
+
+  int cContext::GetFramesPerSec(void) const
+  {
+    const cSequenceParameterSet *SPS = ActiveSPS();
+    const cSliceHeader *SH = CurrentSlice();
+    if (!SH || !SPS->timing_info_present_flag || !SPS->time_scale || !SPS->num_units_in_tick)
+       return -1;
+    uint32_t DeltaTfiDivisor;
+    if (SPS->pic_struct_present_flag) {
+       if (!SPS->pic_timing_sei.Defined())
+          return -1;
+       switch (SPS->pic_timing_sei.pic_struct) {
+         case 1:
+         case 2:
+              DeltaTfiDivisor = 1;
+              break;
+         case 0:
+         case 3:
+         case 4:
+              DeltaTfiDivisor = 2;
+              break;
+         case 5:
+         case 6:
+              DeltaTfiDivisor = 3;
+              break;
+         case 7:
+              DeltaTfiDivisor = 4;
+              break;
+         case 8:
+              DeltaTfiDivisor = 6;
+              break;
+         default:
+              return -1;
+         }
+       }
+    else if (!SH->field_pic_flag)
+       DeltaTfiDivisor = 2;
+    else
+       DeltaTfiDivisor = 1;
+
+    double FPS = (double)SPS->time_scale / SPS->num_units_in_tick / DeltaTfiDivisor / (SH->field_pic_flag ? 2 : 1);
+    int FramesPerSec = (int)FPS;
+    if ((FPS - FramesPerSec) >= 0.5)
+       FramesPerSec++;
+    return FramesPerSec;
+  }
+
+  // --- cSimpleBuffer -------------------------------------------------------
+
+  cSimpleBuffer::cSimpleBuffer(int Size)
+  {
+    size = Size;
+    data = new uchar[size];
+    avail = 0;
+    gotten = 0;
+  }
+
+  cSimpleBuffer::~cSimpleBuffer()
+  {
+    delete[] data;
+  }
+
+  int cSimpleBuffer::Put(const uchar *Data, int Count)
+  {
+    if (Count < 0) {
+       if (avail + Count < 0)
+          Count = 0 - avail;
+       if (avail + Count < gotten)
+          Count = gotten - avail;
+       avail += Count;
+       return Count;
+       }
+    if (avail + Count > size)
+       Count = size - avail;
+    memcpy(data + avail, Data, Count);
+    avail += Count;
+    return Count;
+  }
+
+  uchar *cSimpleBuffer::Get(int &Count)
+  {
+    Count = gotten = avail;
+    return data;
+  }
+
+  void cSimpleBuffer::Del(int Count)
+  {
+    if (Count < 0)
+       return;
+    if (Count > gotten) {
+       esyslog("ERROR: invalid Count in H264::cSimpleBuffer::Del: %d (limited to %d)", Count, gotten);
+       Count = gotten;
+       }
+    if (Count < avail)
+       memmove(data, data + Count, avail - Count);
+    avail -= Count;
+    gotten = 0;
+  }
+
+  void cSimpleBuffer::Clear(void)
+  {
+    avail = gotten = 0;
+  }
+
+  // --- cParser -------------------------------------------------------------
+
+  cParser::cParser(bool OmitPicTiming)
+    : nalUnitDataBuffer(1000)
+  {
+    // the above buffer size of 1000 bytes won't hold a complete NAL unit but
+    // should be sufficient for the relevant part used for parsing.
+    omitPicTiming = OmitPicTiming; // only necessary to determine frames per second
+    Reset();
+  }
+
+  void cParser::Reset(void)
+  {
+    context = cContext();
+    nalUnitDataBuffer.Clear();
+    syncing = true;
+  }
+
+  void cParser::ParseSequenceParameterSet(uint8_t *Data, int Count)
+  {
+    cSequenceParameterSet SPS;
+
+    cBitReader br(Data + 1, Count - 1);
+    uint32_t profile_idc = br.u(8);
+    /* uint32_t constraint_set0_flag = */ br.u(1);
+    /* uint32_t constraint_set1_flag = */ br.u(1);
+    /* uint32_t constraint_set2_flag = */ br.u(1);
+    /* uint32_t constraint_set3_flag = */ br.u(1);
+    /* uint32_t reserved_zero_4bits = */ br.u(4);
+    /* uint32_t level_idc = */ br.u(8);
+    SPS.seq_parameter_set_id = br.ue();
+    if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 || profile_idc == 144) {
+       uint32_t chroma_format_idc = br.ue();
+       if (chroma_format_idc == 3) {
+          /* uint32_t residual_colour_transform_flag = */ br.u(1);
+          }
+       /* uint32_t bit_depth_luma_minus8 = */ br.ue();
+       /* uint32_t bit_depth_chroma_minus8 = */ br.ue();
+       /* uint32_t qpprime_y_zero_transform_bypass_flag = */ br.u(1);
+       uint32_t seq_scaling_matrix_present_flag = br.u(1);
+       if (seq_scaling_matrix_present_flag) {
+          for (int i = 0; i < 8; i++) {
+              uint32_t seq_scaling_list_present_flag = br.u(1);
+              if (seq_scaling_list_present_flag) {
+                 int sizeOfScalingList = (i < 6) ? 16 : 64;
+                 int lastScale = 8;
+                 int nextScale = 8;
+                 for (int j = 0; j < sizeOfScalingList; j++) {
+                     if (nextScale != 0) {
+                        int32_t delta_scale = br.se();
+                        nextScale = (lastScale + delta_scale + 256) % 256;
+                        }
+                     lastScale = (nextScale == 0) ? lastScale : nextScale;
+                     }
+                 }
+              }
+          }
+       }
+    SPS.log2_max_frame_num_minus4(br.ue());
+    SPS.pic_order_cnt_type = br.ue();
+    if (SPS.pic_order_cnt_type == 0)
+       SPS.log2_max_pic_order_cnt_lsb_minus4(br.ue());
+    else if (SPS.pic_order_cnt_type == 1) {
+       SPS.delta_pic_order_always_zero_flag = br.u(1);
+       /* int32_t offset_for_non_ref_pic = */ br.se();
+       /* int32_t offset_for_top_to_bottom_field = */ br.se();
+       uint32_t num_ref_frames_in_pic_order_cnt_cycle = br.ue();
+       for (uint32_t i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++) {
+           /* int32_t offset_for_ref_frame = */ br.se();
+           }
+       }
+    /* uint32_t num_ref_frames = */ br.ue();
+    /* uint32_t gaps_in_frame_num_value_allowed_flag = */ br.u(1);
+    /* uint32_t pic_width_in_mbs_minus1 = */ br.ue();
+    /* uint32_t pic_height_in_map_units_minus1 = */ br.ue();
+    SPS.frame_mbs_only_flag = br.u(1);
+
+    if (!omitPicTiming) {
+       if (!SPS.frame_mbs_only_flag) {
+          /* uint32_t mb_adaptive_frame_field_flag = */ br.u(1);
+          }
+       /* uint32_t direct_8x8_inference_flag = */ br.u(1);
+       uint32_t frame_cropping_flag = br.u(1);
+       if (frame_cropping_flag) {
+          /* uint32_t frame_crop_left_offset = */ br.ue();
+          /* uint32_t frame_crop_right_offset = */ br.ue();
+          /* uint32_t frame_crop_top_offset = */ br.ue();
+          /* uint32_t frame_crop_bottom_offset = */ br.ue();
+          }
+       uint32_t vui_parameters_present_flag = br.u(1);
+       if (vui_parameters_present_flag) {
+          uint32_t aspect_ratio_info_present_flag = br.u(1);
+          if (aspect_ratio_info_present_flag) {
+             uint32_t aspect_ratio_idc = br.u(8);
+             const uint32_t Extended_SAR = 255;
+             if (aspect_ratio_idc == Extended_SAR) {
+                /* uint32_t sar_width = */ br.u(16);
+                /* uint32_t sar_height = */ br.u(16);
+                }
+             }
+          uint32_t overscan_info_present_flag = br.u(1);
+          if (overscan_info_present_flag) {
+             /* uint32_t overscan_appropriate_flag = */ br.u(1);
+             }
+          uint32_t video_signal_type_present_flag = br.u(1);
+          if (video_signal_type_present_flag) {
+             /* uint32_t video_format = */ br.u(3);
+             /* uint32_t video_full_range_flag = */ br.u(1);
+             uint32_t colour_description_present_flag = br.u(1);
+             if (colour_description_present_flag) {
+                /* uint32_t colour_primaries = */ br.u(8);
+                /* uint32_t transfer_characteristics = */ br.u(8);
+                /* uint32_t matrix_coefficients = */ br.u(8);
+                }
+             }
+          uint32_t chroma_loc_info_present_flag = br.u(1);
+          if (chroma_loc_info_present_flag) {
+             /* uint32_t chroma_sample_loc_type_top_field = */ br.ue();
+             /* uint32_t chroma_sample_loc_type_bottom_field = */ br.ue();
+             }
+          SPS.timing_info_present_flag = br.u(1);
+          if (SPS.timing_info_present_flag) {
+             SPS.num_units_in_tick = br.u(32);
+             SPS.time_scale = br.u(32);
+             SPS.fixed_frame_rate_flag = br.u(1);
+             }
+          SPS.nal_hrd_parameters_present_flag = br.u(1);
+          if (SPS.nal_hrd_parameters_present_flag)
+             hrd_parameters(SPS, br);
+          SPS.vcl_hrd_parameters_present_flag = br.u(1);
+          if (SPS.vcl_hrd_parameters_present_flag)
+             hrd_parameters(SPS, br);
+          if (SPS.nal_hrd_parameters_present_flag || SPS.vcl_hrd_parameters_present_flag) {
+             /* uint32_t low_delay_hrd_flag = */ br.u(1);
+             }
+          SPS.pic_struct_present_flag = br.u(1);
+          }
+       }
+
+    context.Define(SPS);
+  }
+
+  void cParser::hrd_parameters(cSequenceParameterSet &SPS, cBitReader &br)
+  {
+    uint32_t cpb_cnt_minus1 = br.ue();
+    /* uint32_t bit_rate_scale = */ br.u(4);
+    /* uint32_t cpb_size_scale = */ br.u(4);
+    for (uint32_t i = 0; i <= cpb_cnt_minus1; i++) {
+        /* uint32_t bit_rate_value_minus1 = */ br.ue();
+        /* uint32_t cpb_size_value_minus1 = */ br.ue();
+        /* uint32_t cbr_flag = */ br.u(1);
+        }
+    /* uint32_t initial_cpb_removal_delay_length_minus1 = */ br.u(5);
+    SPS.cpb_removal_delay_length_minus1(br.u(5));
+    SPS.dpb_output_delay_length_minus1(br.u(5));
+    /* uint32_t time_offset_length = */ br.u(5);
+  }
+
+  void cParser::ParsePictureParameterSet(uint8_t *Data, int Count)
+  {
+    cPictureParameterSet PPS;
+
+    cBitReader br(Data + 1, Count - 1);
+    PPS.pic_parameter_set_id = br.ue();
+    PPS.seq_parameter_set_id = br.ue();
+    /* uint32_t entropy_coding_mode_flag = */ br.u(1);
+    PPS.pic_order_present_flag = br.u(1);
+
+    context.Define(PPS);
+  }
+
+  void cParser::ParseSlice(uint8_t *Data, int Count)
+  {
+    cSliceHeader SH;
+
+    cBitReader br(Data + 1, Count - 1);
+    SH.nal_ref_idc(Data[0] >> 5);
+    SH.nal_unit_type(Data[0] & 0x1F);
+    /* uint32_t first_mb_in_slice = */ br.ue();
+    SH.slice_type = br.ue();
+    SH.pic_parameter_set_id = br.ue();
+
+    context.ActivatePPS(SH.pic_parameter_set_id);
+    const cSequenceParameterSet *SPS = context.ActiveSPS();
+
+    SH.frame_num = br.u(SPS->log2_max_frame_num());
+    if (!SPS->frame_mbs_only_flag) {
+       SH.field_pic_flag = br.u(1);
+       if (SH.field_pic_flag)
+          SH.bottom_field_flag = br.u(1);
+       }
+    if (SH.nal_unit_type() == 5)
+       SH.idr_pic_id = br.ue();
+    if (SPS->pic_order_cnt_type == 0) {
+       SH.pic_order_cnt_lsb = br.u(SPS->log2_max_pic_order_cnt_lsb());
+       const cPictureParameterSet *PPS = context.ActivePPS();
+       if (PPS->pic_order_present_flag && !SH.field_pic_flag)
+          SH.delta_pic_order_cnt_bottom = br.se();
+       }
+    if (SPS->pic_order_cnt_type == 1 && !SPS->delta_pic_order_always_zero_flag) {
+       SH.delta_pic_order_cnt[0] = br.se();
+       const cPictureParameterSet *PPS = context.ActivePPS();
+       if (PPS->pic_order_present_flag && !SH.field_pic_flag)
+          SH.delta_pic_order_cnt[1] = br.se();
+       }
+
+    context.Define(SH);
+  }
+
+  void cParser::ParseSEI(uint8_t *Data, int Count)
+  {
+    // currently only used to determine frames per second
+    if (omitPicTiming)
+       return;
+    cBitReader br(Data + 1, Count - 1);
+    do
+      sei_message(br);
+    while (br.GetBytesAvail());
+  }
+
+  void cParser::sei_message(cBitReader &br)
+  {
+    uint32_t payloadType = 0;
+    while (1) {
+          uint32_t last_payload_type_byte = br.u(8);
+          payloadType += last_payload_type_byte;
+          if (last_payload_type_byte != 0xFF)
+             break;
+          }
+    uint32_t payloadSize = 0;
+    while (1) {
+          uint32_t last_payload_size_byte = br.u(8);
+          payloadSize += last_payload_size_byte;
+          if (last_payload_size_byte != 0xFF)
+             break;
+          }
+    sei_payload(payloadType, payloadSize, br);
+  }
+
+  void cParser::sei_payload(uint32_t payloadType, uint32_t payloadSize, cBitReader &br)
+  {
+    const cBitReader::cBookMark BookMark = br.BookMark();
+    switch (payloadType) {
+      case 0:
+           buffering_period(payloadSize, br);
+           break;
+      case 1:
+           pic_timing(payloadSize, br);
+           break;
+      }
+    // instead of dealing with trailing bits in each message
+    // go back to start of message and skip it completely
+    br.BookMark(BookMark);
+    reserved_sei_message(payloadSize, br);
+  }
+
+  void cParser::buffering_period(uint32_t payloadSize, cBitReader &br)
+  {
+    uint32_t seq_parameter_set_id = br.ue();
+
+    context.ActivateSPS(seq_parameter_set_id);
+  }
+
+  void cParser::pic_timing(uint32_t payloadSize, cBitReader &br)
+  {
+    cPictureTiming PT;
+
+    const cSequenceParameterSet *SPS = context.ActiveSPS();
+    if (!SPS)
+       return;
+    uint32_t CpbDpbDelaysPresentFlag = SPS->nal_hrd_parameters_present_flag || SPS->vcl_hrd_parameters_present_flag;
+    if (CpbDpbDelaysPresentFlag) {
+       /* uint32_t cpb_removal_delay = */ br.u(SPS->cpb_removal_delay_length());
+       /* uint32_t dpb_output_delay = */ br.u(SPS->dpb_output_delay_length());
+       }
+    if (SPS->pic_struct_present_flag) {
+       PT.pic_struct = br.u(4);
+       }
+
+    context.Define(PT);
+  }
+
+  void cParser::reserved_sei_message(uint32_t payloadSize, cBitReader &br)
+  {
+    for (uint32_t i = 0; i < payloadSize; i++) {
+        /* uint32_t reserved_sei_message_payload_byte = */ br.u(8);
+        }
+  }
+
+  void cParser::PutNalUnitData(const uchar *Data, int Count)
+  {
+    int n = nalUnitDataBuffer.Put(Data, Count);
+    // typically less than a complete NAL unit is needed for parsing the
+    // relevant data, so simply ignore the overflow condition.
+    if (false && n != Count)
+       esyslog("ERROR: H264::cParser::PutNalUnitData(): NAL unit data buffer overflow");
+  }
+
+  void cParser::Process()
+  {
+    // nalUnitDataBuffer contains the head of the current NAL unit -- let's parse it 
+    int Count = 0;
+    uchar *Data = nalUnitDataBuffer.Get(Count);
+    if (Data && Count >= 4) {
+       if (Data[0] == 0x00 && Data[1] == 0x00 && Data[2] == 0x01) {
+          int nal_unit_type = Data[3] & 0x1F;
+          try {
+              switch (nal_unit_type) {
+                case 1: // coded slice of a non-IDR picture
+                case 2: // coded slice data partition A
+                case 5: // coded slice of an IDR picture
+                     ParseSlice(Data + 3, Count - 3);
+                     break;
+                case 6: // supplemental enhancement information (SEI)
+                     ParseSEI(Data + 3, Count - 3);
+                     break;
+                case 7: // sequence parameter set
+                     syncing = false; // from now on, we should get reliable results
+                     ParseSequenceParameterSet(Data + 3, Count - 3);
+                     break;
+                case 8: // picture parameter set
+                     ParsePictureParameterSet(Data + 3, Count - 3);
+                     break;
+                }
+              }
+          catch (cException *e) {
+              if (!syncing) // suppress typical error messages while syncing
+                 esyslog(e->Message());
+              delete e;
+              }
+          }
+       else if (!syncing)
+          esyslog("ERROR: H264::cParser::Process(): NAL unit data buffer content is invalid");
+       }
+    else if (!syncing)
+       esyslog("ERROR: H264::cParser::Process(): NAL unit data buffer content is too short");
+    // reset the buffer for the next NAL unit
+    nalUnitDataBuffer.Clear();
+  }
+}
+
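
The frame rate computed by H264::cContext::GetFramesPerSec() above boils down to time_scale / num_units_in_tick / DeltaTfiDivisor, further halved for field pictures. A stand-alone sketch with assumed example values (not taken from any particular stream):

#include <cstdio>

// Mirrors the arithmetic of cContext::GetFramesPerSec() for frame pictures
// (field_pic_flag == 0, DeltaTfiDivisor == 2), including its rounding.
static int FramesPerSec(unsigned timeScale, unsigned numUnitsInTick, unsigned deltaTfiDivisor)
{
  double fps = (double)timeScale / numUnitsInTick / deltaTfiDivisor;
  int result = (int)fps;
  if (fps - result >= 0.5)
     result++;
  return result;
}

int main(void)
{
  printf("%d\n", FramesPerSec( 50, 1, 2)); // e.g. a 25 fps "PAL" style stream
  printf("%d\n", FramesPerSec(100, 1, 2)); // e.g. a 50 fps progressive stream
  return 0;
}
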
diff -Nup ../vdr-1.5.10-orig/h264parser.h ./h264parser.h
--- ../vdr-1.5.10-orig/h264parser.h	1970-01-01 01:00:00.000000000 +0100
+++ ./h264parser.h	2007-08-28 23:18:31.000000000 +0200
@@ -0,0 +1,397 @@
+/*
+ * h264parser.h: a minimalistic H.264 video stream parser
+ *
+ * See the main source file 'vdr.c' for copyright information and
+ * how to reach the author.
+ */
+
+#ifndef __H264PARSER_H
+#define __H264PARSER_H
+
+namespace H264
+{
+  // --- cException ----------------------------------------------------------
+
+  class cException {
+  private:
+    cString message;
+  public:
+    cException(const cString &Message) { message = Message; }
+    const cString &Message(void) const { return message; }
+  };
+
+  // --- cBitReader ----------------------------------------------------------
+
+  class cBitReader {
+  public:
+    class cBookMark {
+    private:
+      uint8_t *data;
+      int count;
+      uint32_t bits;
+      uint32_t bitsAvail;
+      int countZeros;
+      cBookMark(void) {}
+      friend class cBitReader;
+    };
+  private:
+    cBookMark bm;
+    uint8_t NextByte(void);
+    uint32_t ReadBits(uint32_t n);
+  public:
+    cBitReader(uint8_t *Data, int Count);
+    uint32_t u(uint32_t n) { return ReadBits(n); } // read n bits as unsigned number
+    uint32_t ue(void); // read Exp-Golomb coded unsigned number
+    int32_t se(void); // read Exp-Golomb coded signed number
+    uint32_t GetBitsAvail(void) { return (bm.bitsAvail & 0x07); }
+    bool GetBytesAvail(void) { return (bm.count > 0); }
+    const cBookMark BookMark(void) const { return bm; }
+    void BookMark(const cBookMark &b) { bm = b; }
+  };
+
+  inline cBitReader::cBitReader(unsigned char *Data, int Count)
+  {
+    bm.data = Data;
+    bm.count = Count;
+    bm.bitsAvail = 0;
+    bm.countZeros = 0;
+  }
+
+  inline uint8_t cBitReader::NextByte(void)
+  {
+    if (bm.count < 1) // there is no more data left in this NAL unit
+       throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+    // detect 00 00 00, 00 00 01 and 00 00 03 and handle them
+    if (*bm.data == 0x00) {
+       if (bm.countZeros >= 3) // 00 00 00: the current NAL unit should have been terminated already before this sequence
+          throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+       // increase the zero counter as we have a zero byte
+       bm.countZeros++;
+       }
+    else {
+       if (bm.countZeros >= 2) {
+          if (*bm.data == 0x01) // 00 00 01: the current NAL unit should have been terminated already before this sequence
+             throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+          if (*bm.data == 0x03) {
+             // 00 00 03 xx: the emulation prevention byte 03 needs to be removed and xx must be returned
+             if (bm.count < 2)
+                throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+             // drop 03 and xx will be returned below
+             bm.count--;
+             bm.data++;
+             }
+          }
+       // reset the zero counter as we had a non zero byte
+       bm.countZeros = 0;
+       }
+    bm.count--;
+    return *bm.data++;
+  }
+
+  inline uint32_t cBitReader::ReadBits(uint32_t n)
+  {
+    // fill the "shift register" bits with sufficient data
+    while (n > bm.bitsAvail) {
+          bm.bits <<= 8;
+          bm.bits |= NextByte();
+          bm.bitsAvail += 8;
+          if (bm.bitsAvail > 24) { // a further turn will overflow bitbuffer
+             if (n <= bm.bitsAvail)
+                break; // service non overflowing request
+             if (n <= 32) // split overflowing reads into concatenated reads 
+                return (ReadBits(16) << 16) | ReadBits(n - 16);
+             // cannot read more than 32 bits at once
+             throw new cException("ERROR: H264::cBitReader::ReadBits(): bitbuffer overflow");
+             }
+          }
+    // return n most significant bits
+    bm.bitsAvail -= n;
+    return (bm.bits >> bm.bitsAvail) & (((uint32_t)1 << n) - 1);
+  }
+
+  inline uint32_t cBitReader::ue(void)
+  {
+    // read and decode an Exp-Golomb coded unsigned number
+    //
+    // bitstring             resulting number
+    //       1               0
+    //     0 1 x             1 ... 2
+    //   0 0 1 x y           3 ... 6
+    // 0 0 0 1 x y z         7 ... 14
+    // ...
+    int LeadingZeroBits = 0;
+    while (ReadBits(1) == 0)
+          LeadingZeroBits++;
+    if (LeadingZeroBits == 0)
+       return 0;
+    if (LeadingZeroBits >= 32)
+       throw new cException("ERROR: H264::cBitReader::ue(): overflow");
+    return ((uint32_t)1 << LeadingZeroBits) - 1 + ReadBits(LeadingZeroBits);
+  }
+
+  inline int32_t cBitReader::se(void)
+  {
+    // read and decode an Exp-Golomb coded signed number
+    //
+    // unsigned value       resulting signed value
+    // 0                     0
+    // 1                    +1
+    // 2                    -1
+    // 3                    +2
+    // 4                    -2
+    // ...
+    uint32_t r = ue();
+    if (r > 0xFFFFFFFE)
+       throw new cException("ERROR: H264::cBitReader::se(): overflow");
+    return ((r & 1) ? 1 : -1) * (int32_t)((r + 1) / 2);
+  }
+
+  // --- cPictureTiming ------------------------------------------------------
+
+  class cPictureTiming {
+  private:
+    friend class cContext;
+    bool defined;
+  public:
+    cPictureTiming(void) { memset(this, 0, sizeof (*this)); }
+    bool Defined(void) const { return defined; }
+    uint32_t pic_struct;
+  };
+
+  // --- cSequenceParameterSet -----------------------------------------------
+
+  class cSequenceParameterSet {
+  private:
+    friend class cContext;
+    bool defined;
+    uint32_t log2MaxFrameNum;
+    uint32_t log2MaxPicOrderCntLsb;
+    uint32_t cpbRemovalDelayLength;
+    uint32_t dpbOutputDelayLength;
+  public:
+    cSequenceParameterSet(void);
+    bool Defined(void) { return defined; }
+    void log2_max_frame_num_minus4(uint32_t Value) { log2MaxFrameNum = Value + 4; }
+    uint32_t log2_max_frame_num_minus4(void) const { return log2MaxFrameNum - 4; }
+    uint32_t log2_max_frame_num(void) const { return log2MaxFrameNum; }
+    void log2_max_pic_order_cnt_lsb_minus4(uint32_t Value) { log2MaxPicOrderCntLsb = Value + 4; }
+    uint32_t log2_max_pic_order_cnt_lsb_minus4(void) const { return log2MaxPicOrderCntLsb - 4; }
+    uint32_t log2_max_pic_order_cnt_lsb(void) const { return log2MaxPicOrderCntLsb; }
+    void cpb_removal_delay_length_minus1(uint32_t Value) { cpbRemovalDelayLength = Value + 1; }
+    uint32_t cpb_removal_delay_length_minus1(void) const { return cpbRemovalDelayLength - 1; }
+    uint32_t cpb_removal_delay_length(void) const { return cpbRemovalDelayLength; }
+    void dpb_output_delay_length_minus1(uint32_t Value) { dpbOutputDelayLength = Value + 1; }
+    uint32_t dpb_output_delay_length_minus1(void) const { return dpbOutputDelayLength - 1; }
+    uint32_t dpb_output_delay_length(void) const { return dpbOutputDelayLength; }
+    uint32_t seq_parameter_set_id;
+    uint32_t pic_order_cnt_type;
+    uint32_t delta_pic_order_always_zero_flag;
+    uint32_t frame_mbs_only_flag;
+    uint32_t timing_info_present_flag;
+    uint32_t num_units_in_tick;
+    uint32_t time_scale;
+    uint32_t fixed_frame_rate_flag;
+    uint32_t nal_hrd_parameters_present_flag;
+    uint32_t vcl_hrd_parameters_present_flag;
+    uint32_t pic_struct_present_flag;
+    cPictureTiming pic_timing_sei;
+  };
+
+  inline cSequenceParameterSet::cSequenceParameterSet(void)
+  {
+    memset(this, 0, sizeof (*this));
+    log2_max_frame_num_minus4(0);
+    log2_max_pic_order_cnt_lsb_minus4(0);
+    cpb_removal_delay_length_minus1(23);
+    dpb_output_delay_length_minus1(23);
+  }
+
+  // --- cPictureParameterSet ------------------------------------------------
+
+  class cPictureParameterSet {
+  private:
+    friend class cContext;
+    bool defined;
+  public:
+    cPictureParameterSet(void) { memset(this, 0, sizeof (*this)); }
+    bool Defined(void) { return defined; }
+    uint32_t pic_parameter_set_id;
+    uint32_t seq_parameter_set_id;
+    uint32_t pic_order_present_flag;
+  };
+
+  // --- cSliceHeader --------------------------------------------------------
+
+  class cSliceHeader {
+  private:
+    friend class cContext;
+    bool defined;
+    bool isFirstSliceOfCurrentAccessUnit;
+    uint32_t picOrderCntType;
+    uint32_t nalRefIdc;
+    uint32_t nalUnitType;
+  public:
+    cSliceHeader(void) { memset(this, 0, sizeof (*this)); }
+    bool Defined(void) const { return defined; }
+    bool IsFirstSliceOfCurrentAccessUnit(void) const { return isFirstSliceOfCurrentAccessUnit; }
+    void nal_ref_idc(uint32_t Value) { nalRefIdc = Value; }
+    uint32_t nal_ref_idc(void) const { return nalRefIdc; }
+    void nal_unit_type(uint32_t Value) { nalUnitType = Value; }
+    uint32_t nal_unit_type(void) const { return nalUnitType; }
+    uint32_t slice_type;
+    uint32_t pic_parameter_set_id;
+    uint32_t frame_num;
+    uint32_t field_pic_flag;
+    uint32_t bottom_field_flag;
+    uint32_t idr_pic_id;
+    uint32_t pic_order_cnt_lsb;
+    int32_t delta_pic_order_cnt_bottom;
+    int32_t delta_pic_order_cnt[2];
+    enum eAccessUnitType {
+      Frame = 0,
+      TopField,
+      BottomField
+      };
+    eAccessUnitType GetAccessUnitType() const { return (eAccessUnitType)(field_pic_flag + bottom_field_flag); }
+  };
+
+  // --- cContext ------------------------------------------------------------
+
+  class cContext {
+  private:
+    cSequenceParameterSet spsStore[32];
+    cPictureParameterSet ppsStore[256];
+    cSequenceParameterSet *sps; // active Sequence Parameter Set
+    cPictureParameterSet *pps; // active Picture Parameter Set
+    cSliceHeader sh;
+  public:
+    cContext(void) { sps = 0; pps = 0; }
+    void Define(cSequenceParameterSet &SPS);
+    void Define(cPictureParameterSet &PPS);
+    void Define(cSliceHeader &SH);
+    void Define(cPictureTiming &PT);
+    void ActivateSPS(uint32_t ID);
+    void ActivatePPS(uint32_t ID);
+    const cSequenceParameterSet *ActiveSPS(void) const { return sps; }
+    const cPictureParameterSet *ActivePPS(void) const { return pps; }
+    const cSliceHeader *CurrentSlice(void) const { return sh.Defined() ? &sh : 0; }
+    int GetFramesPerSec(void) const;
+  };
+
+  inline void cContext::ActivateSPS(uint32_t ID)
+  {
+    if (ID >= (sizeof (spsStore) / sizeof (*spsStore)))
+       throw new cException("ERROR: H264::cContext::ActivateSPS(): id out of range");
+    if (!spsStore[ID].Defined())
+       throw new cException("ERROR: H264::cContext::ActivateSPS(): requested SPS is undefined");
+    sps = &spsStore[ID];
+  }
+
+  inline void cContext::ActivatePPS(uint32_t ID)
+  {
+    if (ID >= (sizeof (ppsStore) / sizeof (*ppsStore)))
+       throw new cException("ERROR: H264::cContext::ActivatePPS(): id out of range");
+    if (!ppsStore[ID].Defined())
+       throw new cException("ERROR: H264::cContext::ActivatePPS(): requested PPS is undefined");
+    pps = &ppsStore[ID];
+    ActivateSPS(pps->seq_parameter_set_id);
+  }
+
+  inline void cContext::Define(cSequenceParameterSet &SPS)
+  {
+    if (SPS.seq_parameter_set_id >= (sizeof (spsStore) / sizeof (*spsStore)))
+       throw new cException("ERROR: H264::cContext::DefineSPS(): id out of range");
+    SPS.defined = true;
+    spsStore[SPS.seq_parameter_set_id] = SPS;
+  }
+
+  inline void cContext::Define(cPictureParameterSet &PPS)
+  {
+    if (PPS.pic_parameter_set_id >= (sizeof (ppsStore) / sizeof (*ppsStore)))
+       throw new cException("ERROR: H264::cContext::DefinePPS(): id out of range");
+    PPS.defined = true;
+    ppsStore[PPS.pic_parameter_set_id] = PPS;
+  }
+
+  inline void cContext::Define(cSliceHeader &SH)
+  {
+    SH.defined = true;
+    SH.picOrderCntType = ActiveSPS()->pic_order_cnt_type;
+
+    // ITU-T Rec. H.264 (03/2005): 7.4.1.2.4
+    SH.isFirstSliceOfCurrentAccessUnit = !sh.Defined()
+      || (sh.frame_num                  != SH.frame_num)
+      || (sh.pic_parameter_set_id       != SH.pic_parameter_set_id)
+      || (sh.field_pic_flag             != SH.field_pic_flag)
+      || (sh.bottom_field_flag          != SH.bottom_field_flag)
+      || (sh.nalRefIdc                  != SH.nalRefIdc
+      && (sh.nalRefIdc == 0             || SH.nalRefIdc == 0))
+      || (sh.picOrderCntType == 0       && SH.picOrderCntType == 0
+      && (sh.pic_order_cnt_lsb          != SH.pic_order_cnt_lsb
+      ||  sh.delta_pic_order_cnt_bottom != SH.delta_pic_order_cnt_bottom))
+      || (sh.picOrderCntType == 1       && SH.picOrderCntType == 1
+      && (sh.delta_pic_order_cnt[0]     != SH.delta_pic_order_cnt[0]
+      ||  sh.delta_pic_order_cnt[1]     != SH.delta_pic_order_cnt[1]))
+      || (sh.nalUnitType                != SH.nalUnitType
+      && (sh.nalUnitType == 5           || SH.nalUnitType == 5))
+      || (sh.nalUnitType == 5           && SH.nalUnitType == 5
+      &&  sh.idr_pic_id                 != SH.idr_pic_id);
+        
+    sh = SH;
+  }
+
+  inline void cContext::Define(cPictureTiming &PT)
+  {
+    PT.defined = true;
+    ((cSequenceParameterSet *)ActiveSPS())->pic_timing_sei = PT;
+  }
+
+  // --- cSimpleBuffer -------------------------------------------------------
+
+  class cSimpleBuffer {
+  private:
+    uchar *data;
+    int size;
+    int avail;
+    int gotten;
+  public:
+    cSimpleBuffer(int Size);
+    ~cSimpleBuffer();
+    int Size(void) { return size; }
+    int Available(void) { return avail; }
+    int Free(void) { return size - avail; }
+    int Put(const uchar *Data, int Count);
+    uchar *Get(int &Count);
+    void Del(int Count);
+    void Clear(void);
+  };
+
+  // --- cParser -------------------------------------------------------------
+
+  class cParser {
+  private:
+    bool syncing;
+    bool omitPicTiming;
+    cContext context;
+    cSimpleBuffer nalUnitDataBuffer;
+    void hrd_parameters(cSequenceParameterSet &SPS, cBitReader &br);
+    void ParseSequenceParameterSet(uint8_t *Data, int Count);
+    void ParsePictureParameterSet(uint8_t *Data, int Count);
+    void ParseSlice(uint8_t *Data, int Count);
+    void reserved_sei_message(uint32_t payloadSize, cBitReader &br);
+    void pic_timing(uint32_t payloadSize, cBitReader &br);
+    void buffering_period(uint32_t payloadSize, cBitReader &br);
+    void sei_payload(uint32_t payloadType, uint32_t payloadSize, cBitReader &br);
+    void sei_message(cBitReader &br);
+    void ParseSEI(uint8_t *Data, int Count);
+  public:
+    cParser(bool OmitPicTiming = true);
+    const cContext &Context(void) const { return context; }
+    void PutNalUnitData(const uchar *Data, int Count);
+    void Reset(void);
+    void Process(void);
+  };
+}
+
+#endif // __H264PARSER_H
+
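
The Exp-Golomb coding parsed by cBitReader::ue()/se() can be tried out in isolation. Below is a minimal stand-alone decoder following the tables documented above (no emulation prevention handling, no error checking); the bit pattern in main() is an assumed example:

#include <cassert>
#include <cstdint>

struct cBitSource {
  const uint8_t *data;
  uint32_t bitPos;
  uint32_t Bit(void) { uint32_t b = (data[bitPos >> 3] >> (7 - (bitPos & 7))) & 1; bitPos++; return b; }
  uint32_t ue(void) { // unsigned Exp-Golomb: count leading zeros, then read that many bits
    int leadingZeroBits = 0;
    while (Bit() == 0)
          leadingZeroBits++;
    uint32_t rest = 0;
    for (int i = 0; i < leadingZeroBits; i++)
        rest = (rest << 1) | Bit();
    return ((uint32_t)1 << leadingZeroBits) - 1 + rest;
  }
  int32_t se(void) { // signed Exp-Golomb: 0, +1, -1, +2, -2, ...
    uint32_t r = ue();
    return ((r & 1) ? 1 : -1) * (int32_t)((r + 1) / 2);
  }
};

int main(void)
{
  // bitstring 1 | 010 | 00101 | 011 (padded with zeros)
  const uint8_t bits[] = { 0xA2, 0xB0 };
  cBitSource br = { bits, 0 };
  assert(br.ue() ==  0);
  assert(br.ue() ==  1);
  assert(br.ue() ==  4);
  assert(br.se() == -1);
  return 0;
}
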
diff -Nup ../vdr-1.5.10-orig/menu.c ./menu.c
--- ../vdr-1.5.10-orig/menu.c	2007-10-13 12:10:20.000000000 +0200
+++ ./menu.c	2007-10-14 17:27:00.000000000 +0200
@@ -218,6 +218,7 @@ class cMenuEditChannel : public cOsdMenu
 private:
   cChannel *channel;
   cChannel data;
+  int vcodec;
   char name[256];
   void Setup(void);
 public:
@@ -231,6 +232,9 @@ cMenuEditChannel::cMenuEditChannel(cChan
   channel = Channel;
   if (channel) {
      data = *channel;
+     vcodec = data.vpid / VPID_OFFSET_BASE;
+     data.vpid = VPID_FROM_ANY(data.vpid);
+
      if (New) {
         channel = NULL;
         data.nid = 0;
@@ -249,22 +253,28 @@ void cMenuEditChannel::Setup(void)
 
   Clear();
 
+  static const char *VideoCodecValues[] = {
+    tr("MPEG1/2"),
+    tr("H.264")
+    };
+
   // Parameters for all types of sources:
   strn0cpy(name, data.name, sizeof(name));
   Add(new cMenuEditStrItem( tr("Name"),          name, sizeof(name), tr(FileNameChars)));
   Add(new cMenuEditSrcItem( tr("Source"),       &data.source));
   Add(new cMenuEditIntItem( tr("Frequency"),    &data.frequency));
-  Add(new cMenuEditIntItem( tr("Vpid"),         &data.vpid,  0, 0x1FFF));
-  Add(new cMenuEditIntItem( tr("Ppid"),         &data.ppid,  0, 0x1FFF));
+  Add(new cMenuEditIntItem( tr("Vpid"),         &data.vpid,     0, 0x1FFF));
+  Add(new cMenuEditStraItem( tr("Vcodec"),      &vcodec, sizeof(VideoCodecValues) / sizeof(*VideoCodecValues), VideoCodecValues));
+  Add(new cMenuEditIntItem( tr("Ppid"),         &data.ppid,     0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Apid1"),        &data.apids[0], 0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Apid2"),        &data.apids[1], 0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Dpid1"),        &data.dpids[0], 0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Dpid2"),        &data.dpids[1], 0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Spid1"),        &data.spids[0], 0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Spid2"),        &data.spids[1], 0, 0x1FFF));
-  Add(new cMenuEditIntItem( tr("Tpid"),         &data.tpid,  0, 0x1FFF));
+  Add(new cMenuEditIntItem( tr("Tpid"),         &data.tpid,     0, 0x1FFF));
   Add(new cMenuEditCaItem(  tr("CA"),           &data.caids[0]));
-  Add(new cMenuEditIntItem( tr("Sid"),          &data.sid, 1, 0xFFFF));
+  Add(new cMenuEditIntItem( tr("Sid"),          &data.sid,      1, 0xFFFF));
   /* XXX not yet used
   Add(new cMenuEditIntItem( tr("Nid"),          &data.nid, 0));
   Add(new cMenuEditIntItem( tr("Tid"),          &data.tid, 0));
@@ -296,12 +306,14 @@ eOSState cMenuEditChannel::ProcessKey(eK
         if (Channels.HasUniqueChannelID(&data, channel)) {
            data.name = strcpyrealloc(data.name, name);
            if (channel) {
+              data.vpid += vcodec * VPID_OFFSET_BASE; 
               *channel = data;
               isyslog("edited channel %d %s", channel->Number(), *data.ToText());
               state = osBack;
               }
            else {
               channel = new cChannel;
+              data.vpid += vcodec * VPID_OFFSET_BASE; 
               *channel = data;
               Channels.Add(channel);
               Channels.ReNumber();
@@ -4160,7 +4172,7 @@ bool cReplayControl::ShowProgress(bool I
         lastCurrent = lastTotal = -1;
         }
      if (Total != lastTotal) {
-        displayReplay->SetTotal(IndexToHMSF(Total));
+        displayReplay->SetTotal(IndexToHMSF(Total, false, GetFramesPerSec()));
         if (!Initial)
            displayReplay->Flush();
         }
@@ -4168,7 +4180,7 @@ bool cReplayControl::ShowProgress(bool I
         displayReplay->SetProgress(Current, Total);
         if (!Initial)
            displayReplay->Flush();
-        displayReplay->SetCurrent(IndexToHMSF(Current, displayFrames));
+        displayReplay->SetCurrent(IndexToHMSF(Current, displayFrames, GetFramesPerSec()));
         displayReplay->Flush();
         lastCurrent = Current;
         }
@@ -4201,8 +4213,8 @@ void cReplayControl::TimeSearchProcess(e
 {
 #define STAY_SECONDS_OFF_END 10
   int Seconds = (timeSearchTime >> 24) * 36000 + ((timeSearchTime & 0x00FF0000) >> 16) * 3600 + ((timeSearchTime & 0x0000FF00) >> 8) * 600 + (timeSearchTime & 0x000000FF) * 60;
-  int Current = (lastCurrent / FRAMESPERSEC);
-  int Total = (lastTotal / FRAMESPERSEC);
+  int Current = (lastCurrent / GetFramesPerSec());
+  int Total = (lastTotal / GetFramesPerSec());
   switch (Key) {
     case k0 ... k9:
          if (timeSearchPos < 4) {
@@ -4229,7 +4241,7 @@ void cReplayControl::TimeSearchProcess(e
     case kDown:
     case kOk:
          Seconds = min(Total - STAY_SECONDS_OFF_END, Seconds);
-         Goto(Seconds * FRAMESPERSEC, Key == kDown || Key == kPause || Key == kOk);
+         Goto(Seconds * GetFramesPerSec(), Key == kDown || Key == kPause || Key == kOk);
          timeSearchActive = false;
          break;
     default:
@@ -4350,7 +4362,7 @@ void cReplayControl::EditTest(void)
         if ((m->Index() & 0x01) != 0)
            m = marks.Next(m);
         if (m) {
-           Goto(m->position - SecondsToFrames(3));
+           Goto(m->position - SecondsToFrames(3, GetFramesPerSec()));
            Play();
            }
         }
diff -Nup ../vdr-1.5.10-orig/pat.c ./pat.c
--- ../vdr-1.5.10-orig/pat.c	2007-09-02 12:44:19.000000000 +0200
+++ ./pat.c	2007-10-14 17:22:27.000000000 +0200
@@ -431,6 +431,9 @@ void cPatFilter::Process(u_short Pid, u_
                          }
                       }
                       break;
+              case 0x1b: // H.264 / MPEG-4 AVC
+                      Vpid = VPID_TO_H264(stream.getPid());
+                      break;
               //default: printf("PID: %5d %5d %2d %3d %3d\n", pmt.getServiceId(), stream.getPid(), stream.getStreamType(), pmt.getVersionNumber(), Channel->Number());//XXX
               }
             for (SI::Loop::Iterator it; (d = (SI::CaDescriptor*)stream.streamDescriptors.getNext(it, SI::CaDescriptorTag)); ) {
diff -Nup ../vdr-1.5.10-orig/recorder.c ./recorder.c
--- ../vdr-1.5.10-orig/recorder.c	2007-02-24 17:36:24.000000000 +0100
+++ ./recorder.c	2007-02-25 19:22:13.000000000 +0100
@@ -127,7 +127,7 @@ void cFileWriter::Action(void)
 // --- cRecorder -------------------------------------------------------------
 
 cRecorder::cRecorder(const char *FileName, tChannelID ChannelID, int Priority, int VPid, const int *APids, const int *DPids, const int *SPids)
-:cReceiver(ChannelID, Priority, VPid, APids, Setup.UseDolbyDigital ? DPids : NULL, SPids)
+:cReceiver(ChannelID, Priority, VPID_FROM_ANY(VPid), APids, Setup.UseDolbyDigital ? DPids : NULL, SPids)
 ,cThread("recording")
 {
   // Make sure the disk is up and running:
diff -Nup ../vdr-1.5.10-orig/recording.c ./recording.c
--- ../vdr-1.5.10-orig/recording.c	2007-10-14 12:21:54.000000000 +0200
+++ ./recording.c	2007-10-14 17:22:27.000000000 +0200
@@ -1518,11 +1518,11 @@ cUnbufferedFile *cFileName::NextFile(voi
 
 // --- Index stuff -----------------------------------------------------------
 
-cString IndexToHMSF(int Index, bool WithFrame)
+cString IndexToHMSF(int Index, bool WithFrame, int FramesPerSec)
 {
   char buffer[16];
-  int f = (Index % FRAMESPERSEC) + 1;
-  int s = (Index / FRAMESPERSEC);
+  int f = (Index % FramesPerSec) + 1;
+  int s = (Index / FramesPerSec);
   int m = s / 60 % 60;
   int h = s / 3600;
   s %= 60;
@@ -1530,17 +1530,17 @@ cString IndexToHMSF(int Index, bool With
   return buffer;
 }
 
-int HMSFToIndex(const char *HMSF)
+int HMSFToIndex(const char *HMSF, int FramesPerSec)
 {
   int h, m, s, f = 0;
   if (3 <= sscanf(HMSF, "%d:%d:%d.%d", &h, &m, &s, &f))
-     return (h * 3600 + m * 60 + s) * FRAMESPERSEC + f - 1;
+     return (h * 3600 + m * 60 + s) * FramesPerSec + f - 1;
   return 0;
 }
 
-int SecondsToFrames(int Seconds)
+int SecondsToFrames(int Seconds, int FramesPerSec)
 {
-  return Seconds * FRAMESPERSEC;
+  return Seconds * FramesPerSec;
 }
 
 // --- ReadFrame -------------------------------------------------------------
diff -Nup ../vdr-1.5.10-orig/recording.h ./recording.h
--- ../vdr-1.5.10-orig/recording.h	2007-10-14 12:11:34.000000000 +0200
+++ ./recording.h	2007-10-14 17:22:27.000000000 +0200
@@ -239,11 +239,11 @@ public:
   cUnbufferedFile *NextFile(void);
   };
 
-cString IndexToHMSF(int Index, bool WithFrame = false);
+cString IndexToHMSF(int Index, bool WithFrame = false, int FramesPerSec = FRAMESPERSEC);
       // Converts the given index to a string, optionally containing the frame number.
-int HMSFToIndex(const char *HMSF);
+int HMSFToIndex(const char *HMSF, int FramesPerSec = FRAMESPERSEC);
       // Converts the given string (format: "hh:mm:ss.ff") to an index.
-int SecondsToFrames(int Seconds); //XXX+ ->player???
+int SecondsToFrames(int Seconds, int FramesPerSec = FRAMESPERSEC); //XXX+ ->player???
       // Returns the number of frames corresponding to the given number of seconds.
 
 int ReadFrame(cUnbufferedFile *f, uchar *b, int Length, int Max);
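
The effect of the new FramesPerSec parameter is easiest to see on a concrete index. A stand-alone sketch mirroring the arithmetic of IndexToHMSF() above; the index 45000 and the frame rates are assumed example values:

#include <cstdio>

static void PrintHMSF(int Index, int FramesPerSec)
{
  int f = (Index % FramesPerSec) + 1; // frame counter is 1-based
  int s = (Index / FramesPerSec);
  int m = s / 60 % 60;
  int h = s / 3600;
  s %= 60;
  printf("%d:%02d:%02d.%02d\n", h, m, s, f);
}

int main(void)
{
  PrintHMSF(45000, 25); // -> 0:30:00.01 (a 25 fps recording)
  PrintHMSF(45000, 50); // -> 0:15:00.01 (the same index at 50 fps)
  return 0;
}
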
diff -Nup ../vdr-1.5.10-orig/remux.c ./remux.c
--- ../vdr-1.5.10-orig/remux.c	2007-09-22 14:08:22.000000000 +0200
+++ ./remux.c	2007-10-14 17:28:45.000000000 +0200
@@ -19,6 +19,8 @@
 #include "channels.h"
 #include "shutdown.h"
 #include "tools.h"
+#include "recording.h"
+#include "h264parser.h"
 
 ePesHeader AnalyzePesHeader(const uchar *Data, int Count, int &PesPayloadOffset, bool *ContinuationHeader)
 {
@@ -100,8 +102,9 @@ protected:
   int suppressedLogMessages;
   bool LogAllowed(void);
   void DroppedData(const char *Reason, int Count) { LOG("%s (dropped %d bytes)", Reason, Count); }
+  virtual int Put(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded);
 public:
-  static int Put(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded);
+  static int PutAllOrNothing(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded);
   cRepacker(void);
   virtual ~cRepacker() {}
   virtual void Reset(void) { initiallySyncing = true; }
@@ -138,6 +141,11 @@ bool cRepacker::LogAllowed(void)
 
 int cRepacker::Put(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded)
 {
+  return PutAllOrNothing(ResultBuffer, Data, Count, CapacityNeeded);
+}
+
+int cRepacker::PutAllOrNothing(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded)
+{
   if (CapacityNeeded >= Count && ResultBuffer->Free() < CapacityNeeded) {
      esyslog("ERROR: possible result buffer overflow, dropped %d out of %d byte", CapacityNeeded, CapacityNeeded);
      return 0;
@@ -156,7 +164,7 @@ protected:
   int packetTodo;
   uchar fragmentData[6 + 65535 + 3];
   int fragmentLen;
-  uchar pesHeader[6 + 3 + 255 + 3];
+  uchar pesHeader[6 + 3 + 255 + 5 + 3]; // 5: H.264 AUD
   int pesHeaderLen;
   uchar pesHeaderBackup[6 + 3 + 255];
   int pesHeaderBackupLen;
@@ -164,7 +172,7 @@ protected:
   uint32_t localScanner;
   int localStart;
   bool PushOutPacket(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count);
-  virtual int QuerySnoopSize() { return 4; }
+  virtual int QuerySnoopSize(void) { return 4; }
   virtual void Reset(void);
   };
 
@@ -238,6 +246,132 @@ bool cCommonRepacker::PushOutPacket(cRin
   return true;
 }
 
+// --- cAudGenerator ---------------------------------------------------------
+
+class cAudGenerator {
+private:
+  H264::cSimpleBuffer buffer;
+  int overflowByteCount;
+  H264::cSliceHeader::eAccessUnitType accessUnitType;
+  int sliceTypes;
+public:
+  cAudGenerator(void);
+  void CollectSliceType(const H264::cSliceHeader *SH);
+  int CollectData(const uchar *Data, int Count);
+  void Generate(cRingBufferLinear *const ResultBuffer);
+};
+
+cAudGenerator::cAudGenerator()
+  : buffer(MAXFRAMESIZE)
+{
+  overflowByteCount = 0;
+  accessUnitType = H264::cSliceHeader::Frame;
+  sliceTypes = 0;
+}
+
+int cAudGenerator::CollectData(const uchar *Data, int Count)
+{
+  // buffer frame data until AUD can be generated
+  int n = buffer.Put(Data, Count);
+  overflowByteCount += (Count - n);
+  // always report "success" as an error message will be shown in Generate()
+  return Count;
+}
+
+void cAudGenerator::CollectSliceType(const H264::cSliceHeader *SH)
+{
+  if (!SH)
+     return;
+  // remember type of current access unit 
+  accessUnitType = SH->GetAccessUnitType();
+  // translate slice_type into part of primary_pic_type and merge them
+  switch (SH->slice_type) {
+    case 2: // I
+    case 7: // I only => I 
+         sliceTypes |= 0x10000;
+         break;
+    case 0: // P
+    case 5: // P only => I, P
+         sliceTypes |= 0x11000;
+         break;
+    case 1: // B
+    case 6: // B only => I, P, B
+         sliceTypes |= 0x11100;
+         break;
+    case 4: // SI
+    case 9: // SI only => SI
+         sliceTypes |= 0x00010;
+         break;
+    case 3: // SP
+    case 8: // SP only => SI, SP
+         sliceTypes |= 0x00011;
+         break;
+    }
+}
+
+void cAudGenerator::Generate(cRingBufferLinear *const ResultBuffer)
+{
+  int primary_pic_type;
+  // translate the merged primary_pic_type parts into primary_pic_type
+  switch (sliceTypes) {
+    case 0x10000: // I
+         primary_pic_type = 0;
+         break;
+    case 0x11000: // I, P
+         primary_pic_type = 1;
+         break;
+    case 0x11100: // I, P, B
+         primary_pic_type = 2;
+         break;
+    case 0x00010: // SI
+         primary_pic_type = 3;
+         break;
+    case 0x00011: // SI, SP
+         primary_pic_type = 4;
+         break;
+    case 0x10010: // I, SI
+         primary_pic_type = 5;
+         break;
+    case 0x11011: // I, SI, P, SP
+    case 0x10011: // I, SI, SP
+    case 0x11010: // I, SI, P
+         primary_pic_type = 6;
+         break;
+    case 0x11111: // I, SI, P, SP, B
+    case 0x11110: // I, SI, P, B
+         primary_pic_type = 7;
+         break;
+    default:
+         primary_pic_type = -1; // frame without slices?
+    }
+  // drop an incorrect frame
+  if (primary_pic_type < 0)
+     esyslog("ERROR: cAudGenerator::Generate(): dropping frame without slices");
+  else {
+     // drop a partial frame
+     if (overflowByteCount > 0) 
+        esyslog("ERROR: cAudGenerator::Generate(): frame exceeds MAXFRAMESIZE bytes (required size: %d bytes), dropping frame", buffer.Size() + overflowByteCount);
+     else {
+        int Count;
+        uchar *Data = buffer.Get(Count);
+        int PesPayloadOffset = 0;
+        AnalyzePesHeader(Data, Count, PesPayloadOffset);
+        // enter primary_pic_type into AUD
+        Data[ PesPayloadOffset + 4 ] |= primary_pic_type << 5;
+        // mangle the "start code" to pass the information that this access unit is a
+        // bottom field to ScanVideoPacket() where this modification will be reverted.
+        if (accessUnitType == H264::cSliceHeader::BottomField)
+           Data[ PesPayloadOffset + 3 ] |= 0x80;
+        // store the buffered frame
+        cRepacker::PutAllOrNothing(ResultBuffer, Data, Count, Count);
+        }
+     }
+  // prepare for next run
+  buffer.Clear();
+  overflowByteCount = 0;
+  sliceTypes = 0;
+}
+
 // --- cVideoRepacker --------------------------------------------------------
 
 class cVideoRepacker : public cCommonRepacker {
@@ -248,6 +382,13 @@ private:
     scanPicture
     };
   int state;
+  H264::cParser *h264Parser;
+  int sliceSeen;
+  bool audSeen;
+  cAudGenerator *audGenerator;
+  void CheckAudGeneration(bool SliceNalUnitType, bool SyncPoint, const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel);
+  void PushOutCurrentFrameAndStartNewPacket(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel);
+  void HandleNalUnit(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel, const uchar *&NalPayload);
   void HandleStartCode(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel);
   inline bool ScanDataForStartCodeSlow(const uchar *const Data);
   inline bool ScanDataForStartCodeFast(const uchar *&Data, const uchar *Limit);
@@ -256,30 +397,155 @@ private:
   inline bool ScanForEndOfPictureSlow(const uchar *&Data);
   inline bool ScanForEndOfPictureFast(const uchar *&Data, const uchar *Limit);
   inline bool ScanForEndOfPicture(const uchar *&Data, const uchar *Limit);
+  void CollectNalUnitData(const uchar *Data, int Count);
+protected:
+  virtual int Put(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded);
 public:
-  cVideoRepacker(void);
+  cVideoRepacker(bool H264);
+  ~cVideoRepacker();
   virtual void Reset(void);
   virtual void Repack(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count);
   virtual int BreakAt(const uchar *Data, int Count);
   };
 
-cVideoRepacker::cVideoRepacker(void)
+cVideoRepacker::cVideoRepacker(bool H264)
 {
+  h264Parser = (H264 ? new H264::cParser() : 0);
+  audGenerator = 0;
   Reset();
 }
 
+cVideoRepacker::~cVideoRepacker()
+{
+  delete h264Parser;
+  delete audGenerator;
+}
+
 void cVideoRepacker::Reset(void)
 {
   cCommonRepacker::Reset();
+  if (h264Parser)
+     h264Parser->Reset();
   scanner = 0xFFFFFFFF;
   state = syncing;
+  sliceSeen = -1;
+  audSeen = false;
+  delete audGenerator;
+  audGenerator = 0;
 }
 
-void cVideoRepacker::HandleStartCode(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel)
+int cVideoRepacker::Put(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded)
 {
-  // synchronisation is detected some bytes after frame start.
-  const int SkippedBytesLimit = 4;
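+  // while an AUD is being generated, the frame's data is buffered inside the
+  // generator and gets written out later by cAudGenerator::Generate()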
+  if (!audGenerator)
+     return cCommonRepacker::Put(ResultBuffer, Data, Count, CapacityNeeded);
+
+  return audGenerator->CollectData(Data, Count);
+}
 
+void cVideoRepacker::CollectNalUnitData(const uchar *Data, int Count)
+{
+  if (h264Parser)
+     h264Parser->PutNalUnitData(Data, Count);
+}
+
+void cVideoRepacker::HandleNalUnit(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel, const uchar *&NalPayload)
+{
+  // valid NAL units start with a zero bit
+  if (*Data & 0x80) {
+     LOG("cVideoRepacker: found invalid NAL unit: stream seems to be scrambled or not demultiplexed");
+     return;
+     }
+
+  // collect NAL unit's remaining data and process it 
+  CollectNalUnitData(NalPayload, Data - 3 - NalPayload);
+  h264Parser->Process();
+
+  // collect 0x00 0x00 0x01 for current NAL unit
+  static const uchar InitPayload[3] = { 0x00, 0x00, 0x01 };
+  CollectNalUnitData(InitPayload, sizeof (InitPayload));
+  NalPayload = Data;
+
+  // which kind of NAL unit have we got?
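+  // the NAL unit header byte consists of forbidden_zero_bit (1 bit, checked
+  // above), nal_ref_idc (2 bits) and nal_unit_type (5 bits)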
+  const int nal_unit_type = *Data & 0x1F;
+  switch (nal_unit_type) {
+    case 1: // coded slice of a non-IDR picture
+    case 2: // coded slice data partition A
+    case 5: // coded slice of an IDR picture
+         CheckAudGeneration(true, false, Data, ResultBuffer, Payload, StreamID, MpegLevel);
+         break;
+    case 3: // coded slice data partition B
+    case 4: // coded slice data partition C
+    case 19: // coded slice of an auxiliary coded picture without partitioning
+         break;
+    case 6: // supplemental enhancement information (SEI)
+    case 7: // sequence parameter set
+    case 8: // picture parameter set
+    case 10: // end of sequence
+    case 11: // end of stream
+    case 13: // sequence parameter set extension
+         CheckAudGeneration(false, nal_unit_type == 7, Data, ResultBuffer, Payload, StreamID, MpegLevel);
+         break;
+    case 12: // filler data
+         break;
+    case 14 ... 18: // reserved
+         CheckAudGeneration(false, false, Data, ResultBuffer, Payload, StreamID, MpegLevel);
+    case 20 ... 23: // reserved
+         LOG("cVideoRepacker: found reserved NAL unit type: stream seems to be scrambled");
+         break;
+    case 0: // unspecified
+    case 24 ... 31: // unspecified
+         LOG("cVideoRepacker: found unspecified NAL unit type: stream seems to be scrambled");
+         break;
+    case 9: { // access unit delimiter
+         audSeen = true;
+         CheckAudGeneration(false, true, Data, ResultBuffer, Payload, StreamID, MpegLevel);
+         // mangle the "start code" to pass the information that the next access unit will be
+         // a bottom field to ScanVideoPacket() where this modification will be reverted.
+         const H264::cSliceHeader *SH = h264Parser->Context().CurrentSlice();
+         if (SH && SH->GetAccessUnitType() == H264::cSliceHeader::TopField)
+            *(uchar *)Data |= 0x80;
+         }
+         break;
+    }
+}
+
+void cVideoRepacker::CheckAudGeneration(bool SliceNalUnitType, bool SyncPoint, const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel)
+{
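+  // sliceSeen is tri-state: -1 until the synchronisation point has been reached,
+  // afterwards it records whether the previously handled NAL unit was a slice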
+  // we cannot generate anything until we have reached the synchronisation point
+  if (sliceSeen < 0 && !SyncPoint)
+     return;
+  // detect transition from slice to non-slice NAL units
+  const bool WasSliceSeen = (sliceSeen != false);
+  const bool IsSliceSeen = SliceNalUnitType;
+  sliceSeen = IsSliceSeen;
+  // collect slice types for AUD generation
+  if (WasSliceSeen && audGenerator)
+     audGenerator->CollectSliceType(h264Parser->Context().CurrentSlice());
+  // handle access unit delimiter at the transition from slice to non-slice NAL units
+  if (WasSliceSeen && !IsSliceSeen) {
+     // an Access Unit Delimiter indicates that the current picture is done. So let's
+     // push out the current frame to start a new packet for the next picture.
+     PushOutCurrentFrameAndStartNewPacket(Data, ResultBuffer, Payload, StreamID, MpegLevel);
+     if (state == findPicture) {
+        // go on with scanning the picture data
+        state++;
+        }
+     // generate the AUD and push out the buffered frame
+     if (audGenerator) {
+        audGenerator->Generate(ResultBuffer);
+        if (audSeen) {
+           // we no longer need to generate AUDs as they are part of the stream
+           delete audGenerator;
+           audGenerator = 0;
+           }
+        }
+     else if (!audSeen) // we do need to generate AUDs
+        audGenerator = new cAudGenerator;
+     }
+}
+
+void cVideoRepacker::HandleStartCode(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel)
+{
   // which kind of start code have we got?
   switch (*Data) {
     case 0xB9 ... 0xFF: // system start codes
@@ -298,65 +564,9 @@ void cVideoRepacker::HandleStartCode(con
     case 0xB3: // sequence header code
     case 0xB8: // group start code
     case 0x00: // picture start code
-         if (state == scanPicture) {
-            // the above start codes indicate that the current picture is done. So
-            // push out the packet to start a new packet for the next picuture. If
-            // the byte count get's negative then the current buffer ends in a
-            // partitial start code that must be stripped off, as it shall be put
-            // in the next packet.
-            PushOutPacket(ResultBuffer, Payload, Data - 3 - Payload);
-            // go on with syncing to the next picture
-            state = syncing;
-            }
-         if (state == syncing) {
-            if (initiallySyncing) // omit report for the typical initial case
-               initiallySyncing = false;
-            else if (skippedBytes > SkippedBytesLimit) // report that syncing dropped some bytes
-               LOG("cVideoRepacker: skipped %d bytes to sync on next picture", skippedBytes - SkippedBytesLimit);
-            skippedBytes = 0;
-            // if there is a PES header available, then use it ...
-            if (pesHeaderBackupLen > 0) {
-               // ISO 13818-1 says:
-               // In the case of video, if a PTS is present in a PES packet header
-               // it shall refer to the access unit containing the first picture start
-               // code that commences in this PES packet. A picture start code commences
-               // in PES packet if the first byte of the picture start code is present
-               // in the PES packet.
-               memcpy(pesHeader, pesHeaderBackup, pesHeaderBackupLen);
-               pesHeaderLen = pesHeaderBackupLen;
-               pesHeaderBackupLen = 0;
-               }
-            else {
-               // ... otherwise create a continuation PES header
-               pesHeaderLen = 0;
-               pesHeader[pesHeaderLen++] = 0x00;
-               pesHeader[pesHeaderLen++] = 0x00;
-               pesHeader[pesHeaderLen++] = 0x01;
-               pesHeader[pesHeaderLen++] = StreamID; // video stream ID
-               pesHeader[pesHeaderLen++] = 0x00; // length still unknown
-               pesHeader[pesHeaderLen++] = 0x00; // length still unknown
-
-               if (MpegLevel == phMPEG2) {
-                  pesHeader[pesHeaderLen++] = 0x80;
-                  pesHeader[pesHeaderLen++] = 0x00;
-                  pesHeader[pesHeaderLen++] = 0x00;
-                  }
-               else
-                  pesHeader[pesHeaderLen++] = 0x0F;
-               }
-            // append the first three bytes of the start code
-            pesHeader[pesHeaderLen++] = 0x00;
-            pesHeader[pesHeaderLen++] = 0x00;
-            pesHeader[pesHeaderLen++] = 0x01;
-            // the next packet's payload will begin with the fourth byte of
-            // the start code (= the actual code)
-            Payload = Data;
-            // as there is no length information available, assume the
-            // maximum we can hold in one PES packet
-            packetTodo = maxPacketSize - pesHeaderLen;
-            // go on with finding the picture data
-            state++;
-            }
+         // the above start codes indicate that the current picture is done. So let's
+         // push out the current frame to start a new packet for the next picture.
+         PushOutCurrentFrameAndStartNewPacket(Data, ResultBuffer, Payload, StreamID, MpegLevel);
          break;
     case 0x01 ... 0xAF: // slice start codes
          if (state == findPicture) {
@@ -367,6 +577,81 @@ void cVideoRepacker::HandleStartCode(con
     }
 }
 
+void cVideoRepacker::PushOutCurrentFrameAndStartNewPacket(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel)
+{
+  // synchronisation is detected some bytes after frame start.
+  const int SkippedBytesLimit = 4;
+
+  if (state == scanPicture) {
+     // picture data has been found so let's push out the current frame.
+     // If the byte count gets negative then the current buffer ends in a
+     // partial start code that must be stripped off, as it shall be put
+     // in the next packet.
+     PushOutPacket(ResultBuffer, Payload, Data - 3 - Payload);
+     // go on with syncing to the next picture
+     state = syncing;
+     }
+  // when already synced to a picture, just go on collecting data 
+  if (state != syncing)
+     return;
+  // we're synced to a picture so prepare a new packet
+  if (initiallySyncing) // omit report for the typical initial case
+     initiallySyncing = false;
+  else if (skippedBytes > SkippedBytesLimit) // report that syncing dropped some bytes
+     LOG("cVideoRepacker: skipped %d bytes to sync on next picture", skippedBytes - SkippedBytesLimit);
+  skippedBytes = 0;
+  // if there is a PES header available, then use it ...
+  if (pesHeaderBackupLen > 0) {
+     // ISO 13818-1 says:
+     // In the case of video, if a PTS is present in a PES packet header
+     // it shall refer to the access unit containing the first picture start
+     // code that commences in this PES packet. A picture start code commences
+     // in PES packet if the first byte of the picture start code is present
+     // in the PES packet.
+     memcpy(pesHeader, pesHeaderBackup, pesHeaderBackupLen);
+     pesHeaderLen = pesHeaderBackupLen;
+     pesHeaderBackupLen = 0;
+     }
+  else {
+     // ... otherwise create a continuation PES header
+     pesHeaderLen = 0;
+     pesHeader[pesHeaderLen++] = 0x00;
+     pesHeader[pesHeaderLen++] = 0x00;
+     pesHeader[pesHeaderLen++] = 0x01;
+     pesHeader[pesHeaderLen++] = StreamID; // video stream ID
+     pesHeader[pesHeaderLen++] = 0x00; // length still unknown
+     pesHeader[pesHeaderLen++] = 0x00; // length still unknown
+
+     if (MpegLevel == phMPEG2) {
+        pesHeader[pesHeaderLen++] = 0x80;
+        pesHeader[pesHeaderLen++] = 0x00;
+        pesHeader[pesHeaderLen++] = 0x00;
+        }
+     else
+        pesHeader[pesHeaderLen++] = 0x0F;
+     }
+  // add an AUD in H.264 mode when not present in stream
+  if (h264Parser && !audSeen) {
+     pesHeader[pesHeaderLen++] = 0x00;
+     pesHeader[pesHeaderLen++] = 0x00;
+     pesHeader[pesHeaderLen++] = 0x01;
+     pesHeader[pesHeaderLen++] = 0x09; // access unit delimiter
+     pesHeader[pesHeaderLen++] = 0x10; // primary_pic_type is filled in later by cAudGenerator::Generate()
+     }
+  // append the first three bytes of the start code
+  pesHeader[pesHeaderLen++] = 0x00;
+  pesHeader[pesHeaderLen++] = 0x00;
+  pesHeader[pesHeaderLen++] = 0x01;
+  // the next packet's payload will begin with the fourth byte of
+  // the start code (= the actual code)
+  Payload = Data;
+  // as there is no length information available, assume the
+  // maximum we can hold in one PES packet
+  packetTodo = maxPacketSize - pesHeaderLen;
+  // go on with finding the picture data
+  state++;
+}
+
 bool cVideoRepacker::ScanDataForStartCodeSlow(const uchar *const Data)
 {
   scanner <<= 8;
@@ -458,14 +743,19 @@ void cVideoRepacker::Repack(cRingBufferL
   const uchar *data = Data + done;
   // remember start of the data
   const uchar *payload = data;
+  const uchar *NalPayload = payload;
 
   while (todo > 0) {
         // collect number of skipped bytes while syncing
         if (state <= syncing)
            skippedBytes++;
         // did we reach a start code?
-        if (ScanDataForStartCode(data, done, todo))
-           HandleStartCode(data, ResultBuffer, payload, Data[3], mpegLevel);
+        if (ScanDataForStartCode(data, done, todo)) {
+           if (h264Parser)
+              HandleNalUnit(data, ResultBuffer, payload, Data[3], mpegLevel, NalPayload);
+           else
+              HandleStartCode(data, ResultBuffer, payload, Data[3], mpegLevel);
+           }
         // move on
         data++;
         done++;
@@ -568,6 +858,8 @@ void cVideoRepacker::Repack(cRingBufferL
         fragmentLen += bite;
         }
      }
+  // always collect remaining NAL unit data (may be needed for syncing)
+  CollectNalUnitData(NalPayload, data - NalPayload);
   // report that syncing dropped some bytes
   if (skippedBytes > SkippedBytesLimit) {
      if (!initiallySyncing) // omit report for the typical initial case
@@ -581,13 +873,22 @@ bool cVideoRepacker::ScanForEndOfPicture
   localScanner <<= 8;
   localScanner |= *Data++;
   // check start codes which follow picture data
-  switch (localScanner) {
-    case 0x00000100: // picture start code
-    case 0x000001B8: // group start code
-    case 0x000001B3: // sequence header code
-    case 0x000001B7: // sequence end code
-         return true;
-    }
+  if (h264Parser) {
+     int nal_unit_type = localScanner & 0x1F;
+     switch (nal_unit_type) {
+       case 9: // access unit delimiter
+            return true;
+       }
+     }
+  else {
+     switch (localScanner) {
+       case 0x00000100: // picture start code
+       case 0x000001B8: // group start code
+       case 0x000001B3: // sequence header code
+       case 0x000001B7: // sequence end code
+            return true;
+       }
+     }
   return false;
 }
 
@@ -601,15 +902,27 @@ bool cVideoRepacker::ScanForEndOfPicture
         else {
            localScanner = 0x00000100 | *++Data;
            // check start codes which follow picture data
-           switch (localScanner) {
-             case 0x00000100: // picture start code
-             case 0x000001B8: // group start code
-             case 0x000001B3: // sequence header code
-             case 0x000001B7: // sequence end code
-                  Data++;
-                  return true;
-             default:
-                  Data += 3;
+           if (h264Parser) {
+              int nal_unit_type = localScanner & 0x1F;
+              switch (nal_unit_type) {
+                case 9: // access unit delimiter
+                     Data++;
+                     return true;
+                default:
+                     Data += 3;
+                }
+              }
+           else {
+              switch (localScanner) {
+                case 0x00000100: // picture start code
+                case 0x000001B8: // group start code
+                case 0x000001B3: // sequence header code
+                case 0x000001B7: // sequence end code
+                     Data++;
+                     return true;
+                default:
+                     Data += 3;
+                }
              }
            }
         }
@@ -1521,7 +1834,7 @@ void cTS2PES::store(uint8_t *Data, int C
   if (repacker)
      repacker->Repack(resultBuffer, Data, Count);
   else
-     cRepacker::Put(resultBuffer, Data, Count, Count);
+     cRepacker::PutAllOrNothing(resultBuffer, Data, Count, Count);
 }
 
 void cTS2PES::reset_ipack(void)
@@ -1841,7 +2154,7 @@ void cTS2PES::ts_to_pes(const uint8_t *B
         // Enable this if you are having problems with signal quality.
         // These are the errors I used to get with Nova-T when antenna
         // was not positioned correcly (not transport errors). //tvr
-        //dsyslog("TS continuity error (%d)", ccCounter);
+        dsyslog("TS continuity error (%d)", ccCounter);
         }
      ccCounter = Buf[3] & CONT_CNT_MASK;
      }
@@ -1873,12 +2186,15 @@ void cTS2PES::ts_to_pes(const uint8_t *B
 
 #define RESULTBUFFERSIZE KILOBYTE(256)
 
-cRemux::cRemux(int VPid, const int *APids, const int *DPids, const int *SPids, bool ExitOnFailure)
+cRemux::cRemux(int VPid, const int *APids, const int *DPids, const int *SPids, bool ExitOnFailure, bool SyncEarly)
 {
+  h264 = VPID_IS_H264(VPid);
+  VPid = VPID_FROM_ANY(VPid);
   exitOnFailure = ExitOnFailure;
   noVideo = VPid == 0 || VPid == 1 || VPid == 0x1FFF;
   numUPTerrors = 0;
   synced = false;
+  syncEarly = SyncEarly;
   skipped = 0;
   numTracks = 0;
   resultSkipped = 0;
@@ -1887,7 +2203,7 @@ cRemux::cRemux(int VPid, const int *APid
   if (VPid)
 #define TEST_cVideoRepacker
 #ifdef TEST_cVideoRepacker
-     ts2pes[numTracks++] = new cTS2PES(VPid, resultBuffer, IPACKS, 0xE0, 0x00, new cVideoRepacker);
+     ts2pes[numTracks++] = new cTS2PES(VPid, resultBuffer, IPACKS, 0xE0, 0x00, new cVideoRepacker(h264));
 #else
      ts2pes[numTracks++] = new cTS2PES(VPid, resultBuffer, IPACKS, 0xE0);
 #endif
@@ -1937,6 +2253,23 @@ int cRemux::GetPacketLength(const uchar 
   return -1;
 }
 
+bool cRemux::IsFrameH264(const uchar *Data, int Length)
+{
+  int PesPayloadOffset;
+  const uchar *limit = Data + Length;
+  if (AnalyzePesHeader(Data, Length, PesPayloadOffset) <= phInvalid)
+     return false; // neither MPEG1 nor MPEG2
+
+  Data += PesPayloadOffset + 3; // move to video payload and skip 00 00 01
+  if (Data < limit) {
+     // cVideoRepacker ensures that in case of H264 we will see an access unit delimiter here
+     if (0x01 == Data[-1] && 9 == Data[0] && 0x00 == Data[-2] && 0x00 == Data[-3])
+        return true;
+     }
+
+  return false;
+}
+
 int cRemux::ScanVideoPacket(const uchar *Data, int Count, int Offset, uchar &PictureType)
 {
   // Scans the video packet starting at Offset and returns its length.
@@ -1955,23 +2288,67 @@ int cRemux::ScanVideoPacket(const uchar 
            if (p[-2] || p[-1] || p[0] != 0x01)
               pLimit = 0; // skip scanning: packet doesn't start with 0x000001
            else {
-              switch (p[1]) {
-                case SC_SEQUENCE:
-                case SC_GROUP:
-                case SC_PICTURE:
-                     break;
-                default: // skip scanning: packet doesn't start a new sequence, group or picture
-                     pLimit = 0;
-                }
+              if (h264) {
+                 int nal_unit_type = p[1] & 0x1F;
+                 switch (nal_unit_type) {
+                   case 9: // access unit delimiter
+                        // when the MSB in p[1] is set (which violates H.264) then this is a hint
+                        // from cVideoRepacker::HandleNalUnit() that this bottom field shall not
+                        // be reported as a picture.
+                        if (p[1] & 0x80)
+                           ((uchar *)p)[1] &= ~0x80; // revert the hint and fall through
+                        else
+                           break;
+                   default: // skip scanning: packet doesn't start a new picture
+                        pLimit = 0;
+                   }
+                 }
+              else {
+                 switch (p[1]) {
+                   case SC_SEQUENCE:
+                   case SC_GROUP:
+                   case SC_PICTURE:
+                        break;
+                   default: // skip scanning: packet doesn't start a new sequence, group or picture
+                        pLimit = 0;
+                   }
+                 }
               }
            }
 #endif
         while (p < pLimit && (p = (const uchar *)memchr(p, 0x01, pLimit - p))) {
               if (!p[-2] && !p[-1]) { // found 0x000001
-                 switch (p[1]) {
-                   case SC_PICTURE: PictureType = (p[3] >> 3) & 0x07;
-                                    return Length;
-                   }
+                 if (h264) {
+                    int nal_unit_type = p[1] & 0x1F;
+                    switch (nal_unit_type) {
+                      case 9: { // access unit delimiter
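+                              // p[2] is the first byte of the AUD payload; its three
+                              // most significant bits carry primary_pic_type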
+                              int primary_pic_type = p[2] >> 5;
+                              switch (primary_pic_type) {
+                                case 0: // I
+                                case 3: // SI
+                                case 5: // I, SI
+                                     PictureType = I_FRAME;
+                                     break;
+                                case 1: // I, P
+                                case 4: // SI, SP
+                                case 6: // I, SI, P, SP
+                                     PictureType = P_FRAME;
+                                     break;
+                                case 2: // I, P, B
+                                case 7: // I, SI, P, SP, B
+                                     PictureType = B_FRAME;
+                                     break;
+                                }
+                              return Length;
+                              }
+                      }
+                    }
+                 else {
+                    switch (p[1]) {
+                      case SC_PICTURE: PictureType = (p[3] >> 3) & 0x07;
+                                       return Length;
+                      }
+                    }
                  p += 4; // continue scanning after 0x01ssxxyy
                  }
               else
@@ -2080,12 +2457,14 @@ uchar *cRemux::Get(int &Count, uchar *Pi
                         ShutdownHandler.RequestEmergencyExit();
                      }
                   else if (!synced) {
-                     if (pt == I_FRAME) {
+                     if (pt == I_FRAME || syncEarly) {
                         if (PictureType)
                            *PictureType = pt;
                         resultSkipped = i; // will drop everything before this position
-                        SetBrokenLink(data + i, l);
                         synced = true;
+                        if (pt == I_FRAME) // syncEarly: it's ok but there is no need to call SetBrokenLink()
+                           SetBrokenLink(data + i, l);
+else fprintf(stderr, "video: synced early\n");
                         }
                      }
                   else if (Count)
@@ -2098,12 +2477,13 @@ uchar *cRemux::Get(int &Count, uchar *Pi
                l = GetPacketLength(data, resultCount, i);
                if (l < 0)
                   return resultData;
-               if (noVideo) {
+               if (noVideo || !synced && syncEarly) {
                   if (!synced) {
-                     if (PictureType)
+                     if (PictureType && noVideo)
                         *PictureType = I_FRAME;
                      resultSkipped = i; // will drop everything before this position
                      synced = true;
+if (!noVideo) fprintf(stderr, "audio: synced early\n");
                      }
                   else if (Count)
                      return resultData;
diff -Nup ../vdr-1.5.10-orig/remux.h ./remux.h
--- ../vdr-1.5.10-orig/remux.h	2007-09-02 12:19:06.000000000 +0200
+++ ./remux.h	2007-10-14 17:27:40.000000000 +0200
@@ -38,21 +38,25 @@ class cRemux {
 private:
   bool exitOnFailure;
   bool noVideo;
+  bool h264;
   int numUPTerrors;
   bool synced;
+  bool syncEarly;
   int skipped;
   cTS2PES *ts2pes[MAXTRACKS];
   int numTracks;
   cRingBufferLinear *resultBuffer;
   int resultSkipped;
   int GetPid(const uchar *Data);
+  int ScanVideoPacket(const uchar *Data, int Count, int Offset, uchar &PictureType);
 public:
-  cRemux(int VPid, const int *APids, const int *DPids, const int *SPids, bool ExitOnFailure = false);
+  cRemux(int VPid, const int *APids, const int *DPids, const int *SPids, bool ExitOnFailure = false, bool SyncEarly = false);
        ///< Creates a new remuxer for the given PIDs. VPid is the video PID, while
        ///< APids, DPids and SPids are pointers to zero terminated lists of audio,
        ///< dolby and subtitle PIDs (the pointers may be NULL if there is no such
        ///< PID). If ExitOnFailure is true, the remuxer will initiate an "emergency
-       ///< exit" in case of problems with the data stream.
+       ///< exit" in case of problems with the data stream. SyncEarly causes cRemux
+       ///< to sync as soon as a video or audio frame is seen.
   ~cRemux();
   void SetTimeouts(int PutTimeout, int GetTimeout) { resultBuffer->SetTimeouts(PutTimeout, GetTimeout); }
        ///< By default cRemux assumes that Put() and Get() are called from different
@@ -78,7 +82,7 @@ public:
        ///< settings as they are.
   static void SetBrokenLink(uchar *Data, int Length);
   static int GetPacketLength(const uchar *Data, int Count, int Offset);
-  static int ScanVideoPacket(const uchar *Data, int Count, int Offset, uchar &PictureType);
+  static bool IsFrameH264(const uchar *Data, int Length);
   };
 
 #endif // __REMUX_H
diff -Nup ../vdr-1.5.10-orig/sdt.c ./sdt.c
--- ../vdr-1.5.10-orig/sdt.c	2007-06-10 10:50:49.000000000 +0200
+++ ./sdt.c	2007-06-17 18:35:37.000000000 +0200
@@ -55,6 +55,7 @@ void cSdtFilter::Process(u_short Pid, u_
                    case 0x02: // digital radio sound service
                    case 0x04: // NVOD reference service
                    case 0x05: // NVOD time-shifted service
+                   case 0x19: // digital HD television service
                         {
                         char NameBuf[Utf8BufSize(1024)];
                         char ShortNameBuf[Utf8BufSize(1024)];
diff -Nup ../vdr-1.5.10-orig/svdrp.c ./svdrp.c
--- ../vdr-1.5.10-orig/svdrp.c	2007-10-13 12:17:48.000000000 +0200
+++ ./svdrp.c	2007-10-14 17:22:27.000000000 +0200
@@ -1297,8 +1297,10 @@ void cSVDRP::CmdPLAY(const char *Option)
                  int x = sscanf(option, "%d:%d:%d.%d", &h, &m, &s, &f);
                  if (x == 1)
                     pos = h;
-                 else if (x >= 3)
-                    pos = (h * 3600 + m * 60 + s) * FRAMESPERSEC + f - 1;
+                 else if (x >= 3) {
+                    int FramesPerSec = cUnbufferedFile::GetFramesPerSec(recording->FileName());
+                    pos = (h * 3600 + m * 60 + s) * FramesPerSec + f - 1;
+                    }
                  }
               cResumeFile resume(recording->FileName());
               if (pos <= 0)
diff -Nup ../vdr-1.5.10-orig/tools.c ./tools.c
--- ../vdr-1.5.10-orig/tools.c	2007-10-13 14:00:21.000000000 +0200
+++ ./tools.c	2007-10-14 17:22:27.000000000 +0200
@@ -27,6 +27,8 @@ extern "C" {
 #include <utime.h>
 #include "i18n.h"
 #include "thread.h"
+#include "remux.h"
+#include "recording.h"
 
 int SysLogLevel = 3;
 
@@ -1566,6 +1568,112 @@ cUnbufferedFile *cUnbufferedFile::Create
   return File;
 }
 
+int cUnbufferedFile::GetFramesPerSec(const char *FileName)
+{
+  // use this constant as a fallback value
+  int FramesPerSec = FRAMESPERSEC;
+  // open the file and determine the frames per second
+  cFileName fn(FileName, false);
+  cUnbufferedFile *f = fn.Open();
+  if (f) {
+     FramesPerSec = f->GetFramesPerSec();
+     fn.Close();
+     }
+  return FramesPerSec;
+}
+
+#define ADD_H264_SUPPORT 1
+
+#ifdef ADD_H264_SUPPORT
+#include "h264parser.h"
+#endif
+
+int cUnbufferedFile::GetFramesPerSec(void)
+{
+  // use this constant as a fallback value
+  int FramesPerSec = FRAMESPERSEC;
+  // remember the current file position so it can be restored later
+  off_t OrigPos = curpos;
+  // seek to the beginning and read a chunk of data
+  if (0 == Seek(0, SEEK_SET)) {
+     uchar Data[2048];
+     ssize_t Count = Read(Data, sizeof(Data));
+     if (Count > 0) {
+        // this chunk of data should actually be a PES packet
+        uchar *Limit = Data + Count;
+        int PesPayloadOffset = 0;
+        if (AnalyzePesHeader(Data, Count, PesPayloadOffset) == phMPEG2) {
+           // we need a video stream -- radio recordings use the default
+           if ((Data[3] & 0xF0) == 0xE0) {
+              uchar *p = Data + PesPayloadOffset;
+#ifdef ADD_H264_SUPPORT
+              // check whether this is a H.264 video frame
+              if (cRemux::IsFrameH264(Data, Count)) {
+                 // we need an H.264 parser here since picture timing is rather complex
+                 H264::cParser H264parser(false);
+                 // send NAL units to parser until it is able to provide frames per second
+                 while (p < Limit) {
+                       // find next NAL unit
+                       uchar *pNext = (uchar *)memmem(p + 4, Limit - (p + 4), "\x00\x00\x01", 3);
+                       if (!pNext) // just pass the remainder
+                          pNext = Limit; 
+                       H264parser.PutNalUnitData(p, pNext - p);
+                       // process NAL unit and check for frames per second
+                       H264parser.Process();
+                       int FPS = H264parser.Context().GetFramesPerSec();
+                       if (FPS != -1) { // there we are ;-)
+                          FramesPerSec = FPS;
+fprintf(stderr, "FramesPerSec: %d\n", FramesPerSec);
+                          break;
+                          }
+                       // continue with next NAL unit
+                       p = pNext;   
+                       }
+                 }
+              else {
+#endif
+                 // thanks to cVideoRepacker, the payload starts with a sequence header
+                 if (p + 12 <= Limit) {
+                    if (p[0] == 0x00 && p[1] == 0x00 && p[2] == 0x01 && p[3] == 0xB3) {
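+                       // behind the 00 00 01 B3 start code the sequence header carries
+                       // 12 bits horizontal_size, 12 bits vertical_size, 4 bits
+                       // aspect_ratio_information and 4 bits frame_rate_code, so
+                       // frame_rate_code ends up in the low nibble of p[7]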
+                       uint32_t frame_rate_code = p[7] & 0x0F;
+                       uint32_t frame_rate_extension_n = 0;
+                       uint32_t frame_rate_extension_d = 0;
+                       // now we need to have a look at the next start code,
+                       // as it might be a sequence extension
+                       p = (uchar *)memmem(p + 12, Limit - (p + 12), "\x00\x00\x01", 3);
+                       if (p && p + 4 < Limit && p[3] == 0xB5) { // extension start code
+                          if (p + 5 < Limit && (p[4] >> 4) == 0x1) { // sequence extension
+                             if (p + 10 < Limit) {
+                                frame_rate_extension_n = (p[9] & 0x60) >> 5;
+                                frame_rate_extension_d = (p[9] & 0x1F);
+                                }
+                             }
+                          }
+                       // calculate frame rate and round it for compatibility
+                       if (0x1 <= frame_rate_code && frame_rate_code <= 0x8) {
+                          static const int n[] = { -1, 24000, 24, 25, 30000, 30, 50, 60000, 60 };
+                          static const int d[] = { -1,  1001,  1,  1,  1001,  1,  1,  1001,  1 };
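+                          // n[]/d[] mirror the MPEG-2 frame_rate_code table, e.g. code
+                          // 0x3 yields 25/1 = 25 fps and code 0x4 yields 30000/1001,
+                          // i.e. about 29.97 fps, which is rounded to 30 below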
+                          double frame_rate = n[frame_rate_code] * (frame_rate_extension_n + 1) 
+                                   / (double)(d[frame_rate_code] * (frame_rate_extension_d + 1));
+                          FramesPerSec = (int)frame_rate;
+                          if (frame_rate - FramesPerSec > 0.5)
+                             FramesPerSec++;
+fprintf(stderr, "FramesPerSec: %d\n", FramesPerSec);
+                          }
+                       }
+                    }
+#ifdef ADD_H264_SUPPORT
+                 }
+#endif
+              }
+           }
+        }
+     }
+  // restore original position
+  Seek(OrigPos, SEEK_SET);
+  return FramesPerSec;
+}
+
 // --- cLockFile -------------------------------------------------------------
 
 #define LOCKFILENAME      ".lock-vdr"
diff -Nup ../vdr-1.5.10-orig/tools.h ./tools.h
--- ../vdr-1.5.10-orig/tools.h	2007-08-25 16:16:39.000000000 +0200
+++ ./tools.h	2007-08-28 22:25:08.000000000 +0200
@@ -349,6 +349,8 @@ public:
   ssize_t Read(void *Data, size_t Size);
   ssize_t Write(const void *Data, size_t Size);
   static cUnbufferedFile *Create(const char *FileName, int Flags, mode_t Mode = DEFFILEMODE);
+  static int GetFramesPerSec(const char *FileName);
+  int GetFramesPerSec(void);
   };
 
 class cLockFile {
diff -Nup ../vdr-1.5.10-orig/transfer.c ./transfer.c
--- ../vdr-1.5.10-orig/transfer.c	2007-01-05 11:45:28.000000000 +0100
+++ ./transfer.c	2007-09-01 21:30:01.000000000 +0200
@@ -15,11 +15,11 @@
 // --- cTransfer -------------------------------------------------------------
 
 cTransfer::cTransfer(tChannelID ChannelID, int VPid, const int *APids, const int *DPids, const int *SPids)
-:cReceiver(ChannelID, -1, VPid, APids, Setup.UseDolbyDigital ? DPids : NULL, SPids)
+:cReceiver(ChannelID, -1, VPID_FROM_ANY(VPid), APids, Setup.UseDolbyDigital ? DPids : NULL, SPids)
 ,cThread("transfer")
 {
   ringBuffer = new cRingBufferLinear(TRANSFERBUFSIZE, TS_SIZE * 2, true, "Transfer");
-  remux = new cRemux(VPid, APids, Setup.UseDolbyDigital ? DPids : NULL, SPids);
+  remux = new cRemux(VPid, APids, Setup.UseDolbyDigital ? DPids : NULL, SPids, false, true);
 }
 
 cTransfer::~cTransfer()
_______________________________________________
vdr mailing list
vdr@xxxxxxxxxxx
http://www.linuxtv.org/cgi-bin/mailman/listinfo/vdr
