SageTV Community  

Go Back   SageTV Community > SageTV Development and Customizations > SageTV Github Development
Forum Rules FAQs Community Downloads Today's Posts Search

Notices

SageTV Github Development Discussion related to SageTV Open Source Development. Use this forum for development topics about the Open Source versions of SageTV, hosted on Github.

Reply
 
Thread Tools Search this Thread Display Modes
  #21  
Old 09-03-2015, 07:06 PM
stuckless's Avatar
stuckless stuckless is offline
SageTVaholic
 
Join Date: Oct 2007
Location: London, Ontario, Canada
Posts: 9,713
Quote:
Originally Posted by wnjj View Post
Hi Stuckless,

I took a look at the "original" source versus the Sage one and your newest one of mplayer.c

I think I've gotten all of the "Jeff" edits into the pause_loop function. Want to give it a shot?

What's the best way to get it to you?
Since I haven't done any other changes, you can post it/email it... but if you are familiar with git/github, you can clone the repo and create a pull request, and I'll accept it.

and thanks
Reply With Quote
  #22  
Old 09-03-2015, 07:07 PM
wnjj wnjj is offline
Sage Icon
 
Join Date: Jan 2009
Posts: 1,514
Ok. Wasn't sure if posting code snippets is ok. I'm new to this open source stuff.

Code:
/*
 * pause_loop() - block playback while paused, servicing a limited set of
 * input commands until an un-pausing command arrives.
 *
 * SageTV-specific behavior vs. stock MPlayer (per the "NARFLEX"/"Jeff" edits):
 *  - The "Paused" status line is always printed (not hidden behind an else)
 *    because SageTV parses mplayer's stdout to detect play state.
 *  - Volume/mute and active/inactive-file commands are handled *inside*
 *    the pause loop so they do not cause a single-frame step on unpause.
 *  - The "VOLUME=", "MUTED=" and "ID_PAUSED" strings are part of the stdout
 *    protocol SageTV consumes; do not alter them.
 */
static void pause_loop(void)
{
    mp_cmd_t *cmd;
#ifdef CONFIG_STREAM_CACHE
    /* Remember the starting cache fill level so the status line is only
       repainted when the percentage actually changes. */
    int old_cache_fill = stream_cache_size > 0 ? cache_fill_status(mpctx->stream) : 0;
#endif
    if (!quiet) {
        if (term_osd && !mpctx->sh_video) {
            set_osd_msg(OSD_MSG_PAUSE, 1, 0, MSGTR_Paused);
            update_osd_msg();
        } //else
	// NARFLEX: SageTV we always want this Paused message since we use it to detect play state!, don't hide it with an else
            mp_msg(MSGT_CPLAYER, MSGL_STATUS, "\n"MSGTR_Paused "\r");
        mp_msg(MSGT_IDENTIFY, MSGL_INFO, "ID_PAUSED\n");
    }
#ifdef CONFIG_GUI
    if (use_gui)
        gui(GUI_SET_STATE, (void *)GUI_PAUSE);
#endif
    /* Tell the video output and audio output to pause; audio keeps its
       buffered data if the driver supports it so resume is gapless. */
    if (mpctx->video_out && mpctx->sh_video && vo_config_count)
        mpctx->video_out->control(VOCTRL_PAUSE, NULL);

    if (mpctx->audio_out && mpctx->sh_audio)
        mpctx->audio_out->pause();  // pause audio, keep data if possible

	  // Also process volume/mute commands here (we don't want to have it step frames in that case)
	  while (1)
	  {
		  // NOTE(review): assumes the third argument of mp_input_get_cmd()
		  // makes this a non-consuming peek; the command is popped later
		  // with mp_input_get_cmd(0,1,0) -- confirm against input.c.
		  cmd = mp_input_get_cmd(20,1,1);
		  if (cmd == NULL)
		  {
	/* No command pending: keep the VO/GUI/menu/cache-status machinery
	   alive while we idle, then sleep ~20ms and poll again. */
	if (mpctx->sh_video && mpctx->video_out && vo_config_count)
	    mpctx->video_out->check_events();
#ifdef CONFIG_GUI
        if (use_gui) {
            gui(GUI_REDRAW, 0);
            if (guiInfo.Playing != GUI_PAUSE || (rel_seek_secs || abs_seek_pos))
                break;
        }
#endif
#ifdef CONFIG_MENU
        if (vf_menu)
            vf_menu_pause_update(vf_menu);
#endif
#ifdef CONFIG_STREAM_CACHE
        if (!quiet && stream_cache_size > 0) {
            int new_cache_fill = cache_fill_status(mpctx->stream);
            if (new_cache_fill != old_cache_fill) {
                if (term_osd && !mpctx->sh_video) {
                    set_osd_msg(OSD_MSG_PAUSE, 1, 0, MSGTR_Paused " %d%%",
                                new_cache_fill);
                    update_osd_msg();
                } else
                    mp_msg(MSGT_CPLAYER, MSGL_STATUS, MSGTR_Paused " %d%%\r",
                           new_cache_fill);
                old_cache_fill = new_cache_fill;
            }
        }
#endif
        if (mpctx->sh_video)
            handle_udp_master(mpctx->sh_video->pts);
        usec_sleep(20000);
    }
 		 else
 		 {
 			 /* A command is pending (still only peeked). Handle the ones we
 			    service while paused; anything else breaks out of the loop
 			    (unless it carries pausing==4) and resumes playback. */
 			 int usedCmd = 1;
 // NOTE(review): leftover debug print? It goes to the stdout stream SageTV
 // parses, so removing it should be verified against MiniMPlayerPlugin first.
 printf("cmdid=%d\n", cmd->id);
 			 switch (cmd->id)
 			 {
 				case MP_CMD_VOLUME :
 				{
 					float v = cmd->args[0].v.f;
 						// start change for absolute volume value
 	    			int abs = (cmd->nargs > 1) ? cmd->args[1].v.i : 0;
 					float currVolume;
 					if( abs )
 					{
 						/* Absolute set: while muted, only record the level to
 						   restore on unmute; otherwise apply it directly. */
 						if (mpctx->mixer.muted)
 						{
 							mpctx->mixer.last_l = mpctx->mixer.last_r = v;
 						}
 						else
 							mixer_setvolume(&mpctx->mixer, (float)v, (float)v );
 						currVolume = v;
 					}
 					else 
 					{
 					  /* Relative change: sign of v picks the direction; while
 					     muted we adjust the saved levels, clamped to [0,100]. */
 					  if(v > 0)
 					  {
 						  if (mpctx->mixer.muted)
 						  {
 							  mpctx->mixer.last_l += mpctx->mixer.volstep;
 							  mpctx->mixer.last_r += mpctx->mixer.volstep;
 							  if (mpctx->mixer.last_l > 100)
 								  mpctx->mixer.last_l = 100;
 							  if (mpctx->mixer.last_r > 100)
 								  mpctx->mixer.last_r = 100;
 							  currVolume = mpctx->mixer.last_l;
 						  }
 						  else
 						  {
 							mixer_incvolume(&mpctx->mixer);
 							mixer_getbothvolume(&mpctx->mixer, &currVolume);
 						  }
 					  }
 					  else if (v < 0)
 					  {
 						  if (mpctx->mixer.muted)
 						  {
 							  mpctx->mixer.last_l -= mpctx->mixer.volstep;
 							  mpctx->mixer.last_r -= mpctx->mixer.volstep;
 							  if (mpctx->mixer.last_l < 0)
 								  mpctx->mixer.last_l = 0;
 							  if (mpctx->mixer.last_r < 0)
 								  mpctx->mixer.last_r = 0;
 							  currVolume = mpctx->mixer.last_l;
 						  }
 						  else
 						  {
 							mixer_decvolume(&mpctx->mixer);
 							mixer_getbothvolume(&mpctx->mixer, &currVolume);
 						  }
 					  }
 					  else
 					  {
 						  /* v == 0: query only, no change. */
 						  if (mpctx->mixer.muted)
 							  currVolume = mpctx->mixer.last_l;
 						  else
 							  mixer_getbothvolume(&mpctx->mixer, &currVolume);
 					  }
 					}
 					/* "VOLUME=" line is parsed by SageTV; keep the format. */
 					mp_msg(MSGT_GLOBAL,MSGL_INFO,"VOLUME=%f\n", currVolume);
 					break;
 				}
 				case MP_CMD_MUTE:
 				{
 				  mixer_mute(&mpctx->mixer);
 				  /* "MUTED=" line is parsed by SageTV; keep the format. */
 				  mp_msg(MSGT_GLOBAL,MSGL_INFO,"MUTED=%d\n", mpctx->mixer.muted);
 				  break;
 				}
 				case MP_CMD_INACTIVE_FILE : {
 					/* Mark the underlying stream (and, when caching, both the
 					   cache-facing and original streams) as no longer growing. */
 					mpctx->stream->activeFileFlag = 0;
 					if (mpctx->stream->cache_data)
 					{
 						cache_vars_t* sc = mpctx->stream->cache_data;
 						sc->streamOriginal->activeFileFlag = 0;
 						sc->stream->activeFileFlag = 0;
 					}				
 				} break;
 				case MP_CMD_ACTIVE_FILE : {
 					/* Inverse of MP_CMD_INACTIVE_FILE: file is being written
 					   again (e.g. an in-progress recording). */
 					mpctx->stream->activeFileFlag = 1;
					if (mpctx->stream->cache_data)
 					{
						cache_vars_t* sc = mpctx->stream->cache_data;
						sc->streamOriginal->activeFileFlag = 1;
						sc->stream->activeFileFlag = 1;
					}				
				} break;
				case MP_CMD_VO_RECTANGLES : {
					/* Forward the 8 ints (presumably source/dest rectangles --
					   TODO confirm against VOCTRL_RECTANGLES handler) to the VO. */
					int rectData[8] = { cmd->args[0].v.i, cmd->args[1].v.i, cmd->args[2].v.i, cmd->args[3].v.i, 
						cmd->args[4].v.i, cmd->args[5].v.i, cmd->args[6].v.i, cmd->args[7].v.i };
					mpctx->video_out->control(VOCTRL_RECTANGLES, rectData);
					break;
				}
				default:
					usedCmd = 0;
					break;
			 }
			 if (usedCmd)
			 {
				  /* Command was handled in place: pop it off the queue
				     (non-peek fetch) and free it, then keep looping. */
				  cmd = mp_input_get_cmd(0,1,0);
				  mp_cmd_free(cmd);
			 }
			 else if (cmd->pausing != 4)
				 break;
		 }
	  }    if (cmd && cmd->id == MP_CMD_PAUSE) {
	/* The loop was left by a PAUSE (toggle) command: consume it here so
	   the main loop does not immediately re-enter pause. */
	cmd = mp_input_get_cmd(0,1,0);
	mp_cmd_free(cmd);
    }
    mpctx->osd_function = OSD_PLAY;
    /* Resume audio/video output. On EOF the audio buffer is reset instead of
       resumed so stale audio is not played into the next file. */
    if (mpctx->audio_out && mpctx->sh_audio) {
        if (mpctx->eof) // do not play remaining audio if we e.g.  switch to the next file
            mpctx->audio_out->reset();
        else
            mpctx->audio_out->resume();  // resume audio
    }
    if (mpctx->video_out && mpctx->sh_video && vo_config_count)
        mpctx->video_out->control(VOCTRL_RESUME, NULL);  // resume video
    (void)GetRelativeTime(); // ignore time that passed during pause
#ifdef CONFIG_GUI
    if (use_gui) {
        if (guiInfo.Playing == GUI_STOP)
            mpctx->eof = 1;
        else
            gui(GUI_SET_STATE, (void *)GUI_PLAY);
    }
#endif
}
Reply With Quote
  #23  
Old 09-03-2015, 07:10 PM
wnjj wnjj is offline
Sage Icon
 
Join Date: Jan 2009
Posts: 1,514
Also, new to GitHub and no account yet but at this rate I'll likely have to get on board.
Reply With Quote
  #24  
Old 09-03-2015, 07:11 PM
wnjj wnjj is offline
Sage Icon
 
Join Date: Jan 2009
Posts: 1,514
Looks like pasting loses all of the tabs and leading whitespace. If the code works I'll try to get you a better copy.
Reply With Quote
  #25  
Old 09-03-2015, 07:12 PM
stuckless's Avatar
stuckless stuckless is offline
SageTVaholic
 
Join Date: Oct 2007
Location: London, Ontario, Canada
Posts: 9,713
can you post the output of
git diff mplayer.c


that might be easier to pull... or, email me the entire file
sean.stuckless at gmail.com
Reply With Quote
  #26  
Old 09-03-2015, 07:19 PM
wnjj wnjj is offline
Sage Icon
 
Join Date: Jan 2009
Posts: 1,514
emailed it.
Reply With Quote
  #27  
Old 09-03-2015, 07:28 PM
stuckless's Avatar
stuckless stuckless is offline
SageTVaholic
 
Join Date: Oct 2007
Location: London, Ontario, Canada
Posts: 9,713
Quote:
Originally Posted by wnjj View Post
emailed it.
Thanks, I'll test it out in the morning.
Reply With Quote
  #28  
Old 09-04-2015, 05:14 AM
stuckless's Avatar
stuckless stuckless is offline
SageTVaholic
 
Join Date: Oct 2007
Location: London, Ontario, Canada
Posts: 9,713
wnjj's fixes, fixed the pause issue, so thanks for that.

Jeff, I have a couple more questions/notes...

1. -subcc now requires an integer argument, although, mplayer will still continue without it.
2. -printcc is no longer a valid mplayer option, and mplayer dies if you pass it

So, for my testing, I've wrapped the -subcc and -printcc args in an if (!newmplayer) check so that they don't get passed with the new mplayer build. Not quite sure how to best handle it, and I know that these are closed-captioning options, but I'm not sure what -printcc did and why it was removed... and not sure what I should pass to -subcc.

http://www.mplayerhq.hu/DOCS/man/en/mplayer.1.txt
Quote:
-subcc <1-8>
Display DVD Closed Caption (CC) subtitles from the specified
channel. Values 5 to 8 select a mode that can extract EIA-608
compatibility streams from EIA-708 data. These are not the VOB
subtitles, these are special ASCII subtitles for the hearing im‐
paired encoded in the VOB userdata stream on most region 1 DVDs.
CC subtitles have not been spotted on DVDs from other regions so
far.
So, as of now, I have the miniclient working with a new mplayer, in 64bit mode, and it all appears to be working.

I haven't touched any of the ffmpeg code changes that were done to mplayer, and I'm wondering, if you recall what they might be, since all the files that I've tried, appear to work fine, and I think that mplayer is only used for the miniclient desktop, correct? ie, it's not used by the server, and it's not used by the hardware clients, correct?

And lastly, delivering a new mplayer build, will likely be a big changeset, since there are changes to jtux, miniclient java files, build files, jogl, etc, so I'm not sure if you want the changes in your repo, or if I should just push them to my fork, or the open sagetv fork, etc.

If I get time this weekend, I'm going to push beta 32/64 bit builds of the miniclient packages for linux, so that people can test/play with it.

Also, I think for now, I'm going to simply remove the "seatbelt" check for JTux (which is what I've done for testing) to allow it to operate on 64bit... So far, I haven't seen any issues with JTux, and it it's only lightly used in sagetv, so we might be fine to continue using it for now.
Reply With Quote
  #29  
Old 09-04-2015, 07:01 AM
Tiki's Avatar
Tiki Tiki is offline
Sage Icon
 
Join Date: Feb 2005
Location: Southwest Florida, USA
Posts: 2,009
Quote:
Originally Posted by wnjj View Post
Looks like pasting loses all of the tabs and leading whitespace. If the code works I'll try to get you a better copy.
Just FYI - if you wrap "Code" tags around your code when you paste them into a post, the formatting should stay. The icon that looks like a "#" can be used to insert Code tags. Like this:

Code:
Hello!
    This is some indented code
    Blah blah blah
Goodbye!
__________________
Server: Ryzen 2400G with integrated graphics, ASRock X470 Taichi Motherboard, HDMI output to Vizio 1080p LCD, Win10-64Bit (Professional), 16GB RAM
Capture Devices (7 tuners): Colossus (x1), HDHR Prime (x2)
,USBUIRT (multi-zone)
Source:
Comcast/Xfinity X1 Cable
Primary Client: Server Other Clients: (1) HD200, (1) HD300
Retired Equipment: MediaMVP, PVR150 (x2), PVR150MCE,
HDHR, HVR-2250, HD-PVR
Reply With Quote
  #30  
Old 09-08-2015, 03:42 PM
Narflex's Avatar
Narflex Narflex is offline
Sage
 
Join Date: Feb 2003
Location: Redondo Beach, CA
Posts: 6,349
Quote:
Originally Posted by stuckless View Post
Jeff, I have a couple more questions/notes...

1. -subcc now requires an integer argument, although, mplayer will still continue without it.
2. -printcc is no longer a valid mplayer option, and mplayer dies if you pass it
What this did was print out the CC byte data to stdout, so SageTV could parse it and send it back up to the UI system to use as CC/subtitle renderers. Interestingly enough...there is no 'printcc' in the open source code I released, and also there is no 'CCDATA' string either (which the MiniMPlayerPlugin code looks for). So apparently we had broken CC support in MPlayer awhile back. So don't worry about these anymore.

Quote:
Originally Posted by stuckless View Post
I haven't touched any of the ffmpeg code changes that were done to mplayer, and I'm wondering, if you recall what they might be, since all the files that I've tried, appear to work fine, and I think that mplayer is only used for the miniclient desktop, correct? ie, it's not used by the server, and it's not used by the hardware clients, correct?
MPlayer is used by the Placeshifter for all playback. And it's also used by SageTV/SageTVClient when playing back online videos (although I'm not entirely sure that's needed since I think Qian's demux fixed that awhile back). It has zero usages outside of that.

Quote:
Originally Posted by stuckless View Post
And lastly, delivering a new mplayer build, will likely be a big changeset, since there are changes to jtux, miniclient java files, build files, jogl, etc, so I'm not sure if you want the changes in your repo, or if I should just push them to my fork, or the open sagetv fork, etc.
I'm fine with how you did it so that it pulls MPlayer from the other repo when building it now.
__________________
Jeffrey Kardatzke
Google
Founder of SageTV
Reply With Quote
  #31  
Old 09-08-2015, 03:45 PM
Narflex's Avatar
Narflex Narflex is offline
Sage
 
Join Date: Feb 2003
Location: Redondo Beach, CA
Posts: 6,349
And for the FFMPEG code..I was going to find the one it was based off...but apparently my SageTV PC has taken a turn for the worst and has some really horrible symptoms (very corrupted BIOS screen, and I only saw it get into Windows once which then hung shortly after the splash screen....most of the time you don't even see any video output when booting it). And unfortunately the drives in it were in a RAID array with a hardware RAID card, so I can't just take them out and put them in my Linux workstation to get data off them.

I did have to figure this out again at Google at some point; and it wasn't too terrible to basically do a binary search of various versions until I found the closest one (I did it by focusing on specific files that we would have never edited and then finding the match for that one in its revision history and then repeating that with other files until I figured out where we were actually branched from).
__________________
Jeffrey Kardatzke
Google
Founder of SageTV
Reply With Quote
  #32  
Old 09-08-2015, 06:19 PM
sacrament055 sacrament055 is offline
Sage Aficionado
 
Join Date: Jul 2007
Posts: 474
Quote:
Originally Posted by Narflex View Post
And for the FFMPEG code..I was going to find the one it was based off...but apparently my SageTV PC has taken a turn for the worst and has some really horrible symptoms (very corrupted BIOS screen, and I only saw it get into Windows once which then hung shortly after the splash screen....most of the time you don't even see any video output when booting it). And unfortunately the drives in it were in a RAID array with a hardware RAID card, so I can't just take them out and put them in my Linux workstation to get data off them.

I did have to figure this out again at Google at some point; and it wasn't too terrible to basically do a binary search of various versions until I found the closest one (I did it by focusing on specific files that we would have never edited and then finding the match for that one in its revision history and then repeating that with other files until I figured out where we were actually branched from).
PC problem sounds like if you're lucky it's heat or grounding related. If you extract your motherboard from the case and set it on something non conductive you should be able to figure out if it was grounding out or overheating pretty quickly and it might just boot up.
Reply With Quote
  #33  
Old 09-09-2015, 06:43 AM
stuckless's Avatar
stuckless stuckless is offline
SageTVaholic
 
Join Date: Oct 2007
Location: London, Ontario, Canada
Posts: 9,713
Quote:
Originally Posted by Narflex View Post
MPlayer is used by the Placeshifter for all playback. And it's also used by SageTV/SageTVClient when playing back online videos (although I'm not entirely sure that's needed since I think Qian's demux fixed that awhile back). It has zero usages outside of that.
Just so that I understand the terminology... by "Placeshifter" you mean the apps that run on a normal PC, and NOT the hardware extenders, correct? Or is mplayer used on the hardware extenders as well?

I've been spending ALOT of time in the miniclient code these past few days, looking at everything from mplayer, opengl, communication, etc, and I have a few questions about the architecture. (some of these are statements that you can correct, if they are incorrect)

Mplayer Stuff
When opengl is used, I see there an opengl video renderer as well. So, for opengl, is mplayer NOT rendering the video, but instead, you are reading the frames and rendering the frames yourself by reading them from the domain socket. (-vo:stv)

When opengl is NOT used, it looks like it launches mplayer with a UI and embeds it into the AWT canvas.

For the opengl rendering (if I have it correct), then couldn't the mplayer instance run on the server and forward the frames to a client over a standard IP socket. Could this be used to stream to a mobile device, or would there be a more efficient way to enable this? (would I be porting mplayer to a mobile device to handle playback, etc). You must have some ideas on how video playback is/should be done on a mobile device.

API Stuff
The existing minicient (java) is heavily dependent on AWT (fonts, key events, mouse events, etc). I'm in the process to stripping it apart (separate project) to make it AWT independent with an AWT implementation layer. My goal is to have a base re-usable codebase for the miniclient and then provide an implementation for Android as well. A lot of this work is done, but I have a couple of questions.

GFXCMD (and its subclasses, for opengl and directx) basically handles all the graphical commands, from creating the initial window to drawing artifacts, and handling all the input events. Is the render phase of this code continuous, i.e., rendering a complete UI 30+ times a second, or is it static, where each new command simply adds to the previous state of the screen? I think it's the latter, but I just wanted to be sure.

For JOGL it still uses the AWT event system, and it uses AWT Fonts. I was able to get a JOGL renderer working that used opengl input events (some tweaking to bitmasks for event modifiers, since sagetv relies on the AWT based ones), but then I hit a huge road block on FONTS. It amazed me that opengl doesn't really have a universal text rendering api. JOGL provides a TextRenderer that uses AWT, and it has some newer font rendering that no one else can seem to figure out, including me. If the JOGL stuff handled the font rendering independent of AWT, then it could have been used, as is, on Android... but as it stands, I can't use it. And of course, since opengl has no universal font rendering, it also means that opengl on Android ALSO doesn't handle text rendering (easily). There are "hacks" to render text, some of which involve rendering a TextView over the canvas. In your opengl travels have you come across the font rendering issue, and do you have any suggestions for non-AWT based font rendering? I checked out LwGL but it's even in worse shape than JOGL for text rendering... kind of. LwGL allows you to create bitmaps of fonts and then you can use a special text renderer that will render the text as bitmaps.

I'll likely have more questions later about MiniCientCOnnection.
Reply With Quote
  #34  
Old 09-09-2015, 12:22 PM
Narflex's Avatar
Narflex Narflex is offline
Sage
 
Join Date: Feb 2003
Location: Redondo Beach, CA
Posts: 6,349
Quote:
Originally Posted by stuckless View Post
Just so that I understand the terminology... by "Placeshifter" you mean the apps that run on a normal PC, and NOT the hardware extenders, correct? Or is mplayer used on the hardware extenders as well?
Yes. In our internal terminology; the 'miniclient' refers to the C app that runs natively on the media extenders. When I say the 'Placeshifter'; I am referring to the Java application that is built from the sage.miniclient sources. Yeah, I'm confusing sometimes...I'm aware of that. Mplayer is NEVER used on the hardware extenders...they have wonderful hardware decoders that solve all those sorts of problems for us with playback.

Quote:
Originally Posted by stuckless View Post
I've been spending ALOT of time in the miniclient code these past few days, looking at everything from mplayer, opengl, communication, etc, and I have a few questions about the architecture. (some of these are statements that you can correct, if they are incorrect)

Mplayer Stuff
When opengl is used, I see there an opengl video renderer as well. So, for opengl, is mplayer NOT rendering the video, but instead, you are reading the frames and rendering the frames yourself by reading them from the domain socket. (-vo:stv)

When opengl is NOT used, it looks it launches the mplayer with a UI and embeds it into the AWT canvas.
Yes, this is all correct.

Quote:
Originally Posted by stuckless View Post
For the opengl rendering (if I have it correct), then couldn't the mplayer instance run on the server and forward the frames to a client over a standard IP socket. Could this be used to stream to a mobile device, or would there be a more efficient way to enable this? (would I be porting mplayer to a mobile device to handle playback, etc). You must have some ideas on how video playback is/should be done on a mobile device.
Errr....no. MPlayer is still DECODING the frames when OpenGL is being used...OpenGL is just the one rendering them to the screen. You can't have the server decode and the clients render. For 720p60 playback that would take 1280*720*60*12bpp (YUV 4:2:0 colorspace) = 663Mbps of bandwidth to transmit the decoded frames.

And for playback on mobile clients; if they can play the format back natively...then just stream using the MediaServer just like SageTV client does (but that won't work out of the home). If you need something at a lower bitrate/resolution; then just use the same streaming that the Placeshifter client uses (that's what we did when we wrote an iOS app years ago that was never released....which is why there's HTTP Live Streaming code inside of SageTV).

Quote:
Originally Posted by stuckless View Post
API Stuff
The existing minicient (java) is heavily dependent on AWT (fonts, key events, mouse events, etc). I'm in the process to stripping it apart (separate project) to make it AWT independent with an AWT implementation layer. My goal is to have a base re-usable codebase for the miniclient and then provide an implementation for Android as well. A lot of this work is done, but I have a couple of questions.
Nice idea...I did this in the SageTV a long time ago as you may have noticed.

Quote:
Originally Posted by stuckless View Post
GFXCMD (and its subclasses, for opengl and directx) basically handles all the graphical commands, from creating the initial window to drawing artifacts, and handling all the input events. Is the render phase of this code continuous, i.e., rendering a complete UI 30+ times a second, or is it static, where each new command simply adds to the previous state of the screen? I think it's the latter, but I just wanted to be sure.
It only renders when there are new UI frames to update which will get pushed from the server at that time. So it's static in the way you're phrasing it.

Quote:
Originally Posted by stuckless View Post
For JOGL it still uses the AWT event system, and it uses AWT Fonts. I was able to get a JOGL renderer working that used opengl input events (some tweaking to bitmasks for event modifiers, since sagetv relies on the AWT based ones), but then I hit a huge road block on FONTS. It amazed me that opengl doesn't really have a universal text rendering api. JOGL provides a TextRenderer that uses AWT, and it has some newer font rendering that no one else can seem to figure out, including me. If the JOGL stuff handled the font rendering independent of AWT, then it could have been used, as is, on Android... but as it stands, I can't use it. And of course, since opengl has no universal font rendering, it also means that opengl on Android ALSO doesn't handle text rendering (easily). There are "hacks" to render text, some of which involve rendering a TextView over the canvas. In your opengl travels have you come across the font rendering issue, and do you have any suggestions for non-AWT based font rendering? I checked out LwGL but it's even in worse shape than JOGL for text rendering... kind of. LwGL allows you to create bitmaps of fonts and then you can use a special text renderer that will render the text as bitmaps.
Yeah...OpenGL isn't designed for rendering text. (although there is text stuff in its APIs) The way that we do text in the server code is by using Freetype. We render the freetype fonts to a buffer (which is essentially an image). And then have a whole MetaFont system in SageTV around that. This uses our RawImage objects (which are basically a non-AWT image with a memory buffer backing it)...and then that's wrapped in the MetaImage layer (again a non-AWT image system that handles all type of image loading, scaling, etc.). The MetaFont system can use freetype (or also the Java AWT Font system) to draw the glyphs to the RawImage. It then has methods for taking SageTV RenderingOps that are text based and converting the strings to a sequence of image copy commands. The RawImage is loaded using the image loading code in the miniclient protocol (which is also the placeshifter protocol) and then the glyphs are drawn using the image drawing commands in the protocol.

It's all quite extensive and took loads of work to get AWT completely out of there. The Sage.jar file on the HD300 has ZERO AWT code in it.

After thinking about this a little more; I would actually encourage you to NOT do this abstraction. The Placeshifter code is not that extensive. The majority of it is based around the specific implementation it is dealing with (such as DirectX, OpenGL or AWT). And for someone porting to Android; it would probably be easier to just write it all again. They could of course copy/paste big chunks of stuff to get most of the protocol handling code in there (like has already been done between the different implementations in there). The whole context for the server dialog on startup would also likely be very different on Android; so that code wouldn't really get reused at all.

The amount of effort it would take you to complete this abstraction would probably be fairly equivalent to the amount of work to create the Android app itself IMHO.
__________________
Jeffrey Kardatzke
Google
Founder of SageTV
Reply With Quote
  #35  
Old 09-09-2015, 12:50 PM
stuckless's Avatar
stuckless stuckless is offline
SageTVaholic
 
Join Date: Oct 2007
Location: London, Ontario, Canada
Posts: 9,713
Quote:
Originally Posted by Narflex View Post
Errr....no. MPlayer is still DECODING the frames when OpenGL is being used...OpenGL is just the one rendering them to the screen. You can't have the server decode and the clients render. For 720p60 playback that would take 1280*720*60*12bpp (YUV 4:2:0 colorspace) = 663Mbps of bandwidth to transmit the decoded frames.
I'm not a "video guy" but yeah, that sounds like a lot of data... so, mplayer is decoding the stream, and opengl is rendering each frame. That's the part I didn't get initially.

Quote:
Originally Posted by Narflex View Post
And for playback on mobile clients; if they can play the format back natively...then just stream using the MediaServer just like SageTV client does (but that won't work out of the home). If you need something at a lower bitrate/resolution; then just use the same streaming that the Placeshifter client uses (that's what we did when we wrote an iOS app years ago that was never released....which is why there's HTTP Live Streaming code inside of SageTV).
So, from an AndroidTV/Mobile point of view, would I need to request and process the video 2 different ways, depending on if I was using the MediaServer to send back data or using the Placeshifter client? Today, in the Java miniclient code, I can see there is a MiniMPlayerPlugin that I think handles the MPLAYER part of decoding the video. I'm just trying to grasp the scope of work that is needed here. I've done hls stuff before, so passing that (as a url) to an existing mediaplayer on android is pretty easy. This is phase 2/3 thing, I still have to get a UI working on android

Quote:
Originally Posted by Narflex View Post
Yeah...OpenGL isn't designed for rendering text. (although there is text stuff in its APIs) The way that we do text in the server code is by using Freetype. We render the freetype fonts to a buffer (which is essentially an image). And then have a whole MetaFont system in SageTV around that. This uses our RawImage objects (which are basically a non-AWT image with a memory buffer backing it)...and then that's wrapped in the MetaImage layer (again a non-AWT image system that handles all type of image loading, scaling, etc.). The MetaFont system can use freetype (or also the Java AWT Font system) to draw the glyphs to the RawImage. It then has methods for taking SageTV RenderingOps that are text based and converting the strings to a sequence of image copy commands. The RawImage is loaded using the image loading code in the miniclient protocol (which is also the placeshifter protocol) and then the glyphs are drawn using the image drawing commands in the protocol.

It's all quite extensive and took loads of work to get AWT completely out of there. The Sage.jar file on the HD300 has ZERO AWT code in it.
I'm a little bit confused here... so are you saying that the DRAWTEXT api in GFXCMD is not used? And, that you handled text transparently on the server and send bitmaps to the Placeshifter? The GFXCMD has loads of code in there around Fonts, caching, etc, and drawing text.

Quote:
Originally Posted by Narflex View Post
After thinking about this a little more; I would actually encourage you to NOT do this abstraction. The Placeshifter code is not that extensive. The majority of it is based around the specific implementation it is dealing with (such as DirectX, OpenGL or AWT). And for someone porting to Android; it would probably be easier to just write it all again. They could of course copy/paste big chunks of stuff to get most of the protocol handling code in there (like has already been done between the different implementations in there). The whole context for the server dialog on startup would also likely be very different on Android; so that code wouldn't really get reused at all.

The amount of effort it would take you to complete this abstraction would probably be fairly equivalent to the amount of work to create the Android app itself IMHO.
[/quote]

In the end, that's exactly what it'll be, i.e., I've copied the miniclient code to a new project, and I've basically started to gut and shred anything that doesn't fit into my current objective (i.e., AWT stuff, Windows stuff, etc). But in the process, I'm trying to understand what is OS dependent and what is UI/AWT dependent, etc, and I'm basically structuring the code to better support a clearer separation of OS and UI tasks. My goal is really to do this for android, but it'll likely help someone doing this for iOS as well. I don't want to re-write all of the communication code, event handling code, GFXCMD base code, etc, so I've copied them, and then gutted them.
Reply With Quote
  #36  
Old 09-09-2015, 02:41 PM
Taddeusz Taddeusz is offline
SageTVaholic
 
Join Date: Nov 2004
Location: Yukon, OK
Posts: 3,919
Just officially became an Apple developer. Curious whether porting the MiniClient to iOS or utilizing sagex would be the best course? I would like to stay away from third-party dependencies (e.g. plugins) if possible.
__________________
Server: i5 8400, ASUS Prime H370M-Plus/CSM, 16GB RAM, 15TB drive array + 500GB cache, 2 HDHR's, SageTV 9, unRAID 6.6.3
Client 1: HD300 (latest FW), HDMI to an Insignia 65" 1080p LCD and optical SPDIF to a Sony Receiver
Client 2: HD200 (latest FW), HDMI to an Insignia NS-LCD42HD-09 1080p LCD
Reply With Quote
  #37  
Old 09-09-2015, 02:48 PM
Taddeusz Taddeusz is offline
SageTVaholic
 
Join Date: Nov 2004
Location: Yukon, OK
Posts: 3,919
As a side note I believe we'll need to add the ability to encode to H.264 for mobile playback. As far as I can tell MPEG4 Part 2/H.263 playback is not supported on either Android or iOS.
__________________
Server: i5 8400, ASUS Prime H370M-Plus/CSM, 16GB RAM, 15TB drive array + 500GB cache, 2 HDHR's, SageTV 9, unRAID 6.6.3
Client 1: HD300 (latest FW), HDMI to an Insignia 65" 1080p LCD and optical SPDIF to a Sony Receiver
Client 2: HD200 (latest FW), HDMI to an Insignia NS-LCD42HD-09 1080p LCD
Reply With Quote
  #38  
Old 09-09-2015, 03:43 PM
Narflex's Avatar
Narflex Narflex is offline
Sage
 
Join Date: Feb 2003
Location: Redondo Beach, CA
Posts: 6,349
Quote:
Originally Posted by stuckless View Post
So, from an AndroidTV/Mobile point of view, would I need to request and process the video 2 different ways, depending on if I was using the MediaServer to send back data or using the Placeshifter client? Today, in the Java miniclient code, I can see there is a MiniMPlayerPlugin that I think handles the MPLAYER part of decoding the video. I'm just trying to grasp the scope of work that is needed here. I've done HLS stuff before, so passing that (as a URL) to an existing media player on Android is pretty easy. This is a phase 2/3 thing; I still have to get a UI working on Android.
If you try to use the HLS server (and it still works, I don't know if it does) then you might be able to get away with just passing a URL. However, I think a better approach (since the HLS server is nowhere near as good as the placeshifter transcoder) is to use a 'MediaSource' type of approach (android.media.MediaDataSource) where you push in the media packets yourself and then do things like 'flush' when seeks occur (and the miniclient media protocol sends commands like that).

Quote:
Originally Posted by stuckless View Post
I'm a little bit confused here... so are you saying that the DRAWTEXT api in GFXCMD is not used? And, that you handled text transparently on the server and send bitmaps to the Placeshifter? The GFXCMD has loads of code in there around Fonts, caching, etc, and drawing text.
Yeah, that's not used anymore. That was from before we had the more advanced text system....I also thought at one point it was a better way to do things. But then I found too many problems with having the server calculate text positions/size/bounds using the font logic it had...and then expecting that to match on the client. Too many times it was off by a few pixels and things were getting chopped. It is possible to make it still use the DRAWTEXT command by having the GFX_TEXTMODE property return something besides an empty string or 'None'. But I wouldn't recommend doing that.

Quote:
Originally Posted by stuckless View Post
In the end, that's exactly what it'll be ie, I've copied the miniclient code to a new project, and I've basically started to gut and shred anything that doesn't fit into my current objective (ie, AWT stuff, Windows stuff, etc). But in the process, I'm trying to understand what is OS dependent and what is UI/AWT dependent, etc, and I'm basically structuring to code, to better support a clearer separation of OS and UI tasks. My goal is really to do this for android, but it'll likely help someone doing this for iOS as well. I don't want to re-write all of the communication code, event handling code, GFXCMD base code, etc, so I've copied them, and then gutted them.
Remember the Java code will be of no use on iOS...it'll have to be rewritten in Objective C for that platform.

Quote:
Originally Posted by Taddeusz View Post
As a side note I believe we'll need to add the ability to encode to H.264 for mobile playback. As far as I can tell MPEG4 Part 2/H.263 playback is not supported on either Android or iOS.
That's interesting....I can tell you with 100% certainty that I've placeshifted to an iPhone before (and we never did H264 transcoding). I watched Seinfeld in the DFW airport that way when I was going to the FCC in DC for an AllVid session. But adding H264 transcoding shouldn't be all that hard to do....I know x264 has dynamic rate control options in it; and that was the hardest part.
__________________
Jeffrey Kardatzke
Google
Founder of SageTV
Reply With Quote
  #39  
Old 09-09-2015, 03:51 PM
Taddeusz Taddeusz is offline
SageTVaholic
 
Join Date: Nov 2004
Location: Yukon, OK
Posts: 3,919
Quote:
Originally Posted by Narflex View Post
Remember the Java code will be of no use on iOS...it'll have to be rewritten in Objective C for that platform.
Or Swift. If I really start this, and I just became an Apple dev, I will be doing this in Swift.

Quote:
Originally Posted by Narflex View Post
That's interesting....I can tell you with 100% certainty that I've placeshifted to an iPhone before (and we never did H264 transcoding). I watched Seinfeld in the DFW airport that way when I was going to the FCC in DC for an AllVid session. But adding H264 transcoding shouldn't be all that hard to do....I know x264 has dynamic rate control options in it; and that was the hardest part.
Everything I've read shows that the built-in frameworks only support H.264. AFAIK, additional decoders have to be used for H.263 playback. Then they also wouldn't have their decoding accelerated by the dedicated hardware usually built for H.264. However, I could always be wrong.
__________________
Server: i5 8400, ASUS Prime H370M-Plus/CSM, 16GB RAM, 15TB drive array + 500GB cache, 2 HDHR's, SageTV 9, unRAID 6.6.3
Client 1: HD300 (latest FW), HDMI to an Insignia 65" 1080p LCD and optical SPDIF to a Sony Receiver
Client 2: HD200 (latest FW), HDMI to an Insignia NS-LCD42HD-09 1080p LCD
Reply With Quote
  #40  
Old 09-09-2015, 03:59 PM
Narflex's Avatar
Narflex Narflex is offline
Sage
 
Join Date: Feb 2003
Location: Redondo Beach, CA
Posts: 6,349
Quote:
Originally Posted by Taddeusz View Post
Everything I've read shows that the built-in frameworks only support H.264. AFAIK, additional decoders have to be used for H.263 playback. Then they also wouldn't have their decoding accelerated by the dedicated hardware usually built for H.264. However, I could always be wrong.
Yeah, but this isn't H.263. It's MPEG4-part2, and H264 is MPEG4-part10. It's also possible they've changed support over the years....I played MPEG4 on iOS about 4.5 years ago.
__________________
Jeffrey Kardatzke
Google
Founder of SageTV
Reply With Quote
Reply


Currently Active Users Viewing This Thread: 1 (0 members and 1 guests)
 

Posting Rules
You may not post new threads
You may not post replies
You may not post attachments
You may not edit your posts

BB code is On
Smilies are On
[IMG] code is On
HTML code is Off

Forum Jump

Similar Threads
Thread Thread Starter Forum Replies Last Post
Linux 64bit MiniClient (issues with JTux on 64bit) stuckless SageTV Github Development 23 08-19-2015 12:54 PM
OSD doesn't work in Linux miniclient 6.6 Grant.Edwards SageTV Linux 1 08-27-2011 04:37 PM
Linux Placeshifter/Miniclient Fails scsever SageTV Linux 1 08-23-2011 10:20 PM
mplayer opens in new window in Linux Spectrum SageTV Linux 3 08-18-2009 07:08 AM
Running Linux Miniclient on Xbox xred SageTV Placeshifter 0 03-06-2007 12:28 AM


All times are GMT -6. The time now is 03:50 PM.


Powered by vBulletin® Version 3.8.11
Copyright ©2000 - 2023, vBulletin Solutions Inc.
Copyright 2003-2005 SageTV, LLC. All rights reserved.